bug squashing and composing

commit 12488699e9 (parent 7c5f26b15e)

Binary file not shown.
@@ -0,0 +1,945 @@
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
// (model and global vars are mostly set by the OSC funcs below)
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
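// an "hsArray" encodes a pitch as a vector of exponents over the just ratios in
// the global `primes` list below; converting to cents sums each exponent times its ratio's size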
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
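// hdSum: summed pairwise harmonic distance across a set of hsArrays, weighting
// each dimension by log2(numerator * denominator) (a Tenney-style measure)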
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
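// genTuples: candidate voice-leading moves: every vector that alters at most
// one non-octave dimension by +/-1, duplicated across octave offsets -3..3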
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
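// genDurFunc: returns a seeded duration generator; envData is sampled into a
// random table, durations are drawn from [minDur, maxDur] and rounded to 1/8 beat,
// a duration of 0 (simultaneous attack) occurs with probability chordProb when
// allowed, and rrand(minPad, maxPad) of extra padding is added when pad is true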
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
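// updateVoices: re-voices one instrument (ins); candidates are the sustained
// voices' pitches offset by each tuple, scored for step size, recency, range,
// spacing, and harmonic distance, weighted by passagesWeights, multiplied into
// a probability per candidate, and sampled with wchoose; the new chord is
// appended to lastXChanges (the last five states are kept)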
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
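// genSubMotif: realizes one order ([sus, prog, silent]) as a list of
// [chord, duration] events; progressing voices are re-voiced via updateVoices,
// silent voices become Rests, and the final order is padded out with exits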
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
	//round the final duration up so the total length lands on a measure boundary (4 beats)
	fDur = fSeq.flatten.flatten.slice(nil, 1).sum;
	durAdd = fDur.round(4) - fDur;
	if(durAdd < 0, {durAdd = durAdd + 4});
	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd;
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
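// genSecondarySeq: groups the primary sequence into clumps of 1-3 motifs and
// appends a short rest to each clump; as written, the wchoose weights make the
// repeat count per clump always 1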
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
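// genPatterns: renders a sequence as parallel Pbinds; per voice, consecutive
// identical chords are merged into single held notes (freq = middle C times the
// hsArray ratio), and unless oneShot is set an \osc Pbind reports the section
// currently playing back to the controlling address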
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
						mOut.bend(v, pitchbendvalue);	// send pitch bend via the MIDIOut created above
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
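// seedFunc: wraps func in a Routine with its own random seed, so repeated calls
// replay the same pseudo-random stream; hypothetical usage:
// f = seedFunc.value({rrand(0, 10)}, 12345); f.value; f.value; // reproducible draws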
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
	//prints the motif and flags adjacent chords that change more than one voice ("problem 1") or not at all ("problem 2")
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
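// msgInterpret: coerces values arriving via OSC/JSON back into SC objects;
// array-like input is re-interpreted after stripping whitespace and quotes
// (with Rest re-quoted), digit-only strings become Integers, "." strings Floats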
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
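// writeResources: serializes dict (keyed by nameSpaces, order matters) to a
// JSON string and writes it to path; music_data, last_changes, and order are
// pretty-printed to a readable depth via stringifyToDepth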
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
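// just-intonation ratio set: octave, fifth, major third, harmonic seventh,
// and the 11th and 13th partials reduced into the octave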
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
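// the OSCdefs below expose the model to an external controller (presumably a
// GUI): loading/saving ledgers and models, generating, committing, transport,
// and transcription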
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
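// \generate: loads a model file and restores its vars, takes lastXChanges from
// the referenced model (ref_uid) when available or re-initializes the voices,
// seeds the duration/step/order generators, generates a new seq, writes the
// model back to disk, and replies with "/generated"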
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
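// \commit: writes the current model (including any edited music data) into a
// fresh UID directory along with a copy of this code, backs up and rewrites the
// ledger (add/insert/replace, dropping any "tmp" entry), and replies with
// "/committed"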
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
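// \transport: msg[1] == 0 stops playback and releases the synth group;
// msg[1] == 1 plays the ledger from the given start index plus the cued "tmp"
// model; any other value auditions a single model in one-shot mode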
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
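// \transcribe_all: renders each ledger entry (from the given index) to LilyPond
// via ~transcribe, using the referenced model's last chord as the reference
// chord, and collects per-part \include files for the assembled score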
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
@@ -0,0 +1,59 @@
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 1 ] ], 2 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, 0, 0, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 0, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 2 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 0 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 0.625 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 0 ], | ||||||
|  |     [ [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.25 ], | ||||||
|  |     [ [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 0 ], | ||||||
|  |     [ [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 0 ], | ||||||
|  |     [ [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 1 ], [ 2, -1, 0, -2, 2, 0 ] ], 1.25 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 1 ], [ 2, -1, 0, -2, 2, 0 ] ], 1.25 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -1, 2, 0 ], [ 2, -1, 0, -2, 2, 0 ] ], 1.125 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 3, 0 ] ], 2.25 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ "Rest" ], [ 1, -1, 1, -2, 3, 0 ] ], 1.5 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ "Rest" ], [ 1, -1, 1, -2, 3, 0 ] ], 0.875 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ "Rest" ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 3.25 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 1 ], [ 2, -1, 0, -2, 2, 0 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 1 ], [ 2, -1, 0, -2, 2, 0 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -1, 2, 0 ], [ 2, -1, 0, -2, 2, 0 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 3, 0 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "4200a90d", | ||||||
|  | "ref_uid": "6d635e88", | ||||||
|  | "order_seed": 516056, | ||||||
|  | "dur_seed": 358555, | ||||||
|  | "motifs_seed": 168145, | ||||||
|  | "entrances_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.82, 2.0054945054945, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0.29, 0, 1.1111111111111, 0.65934065934066, 1.37, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.82, 2.0054945054945, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2411.1455108359, -850.77399380805 ], [ -1872, 450 ], [ -479, 1304.0247678019 ], [ -368, 1173.9938080495 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.33950617283951, 0, 0.72839506172839, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.35, 0.42, 0.75, 0.9, 0.93 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 3 ], [ 2 ], [ 0, 1 ] ], | ||||||
|  |   [ [ 1 ], [ 2, 0, 2, 0, 0, 2, 3, 0, 2, 3 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.41, 0, 0 ], | ||||||
|  | "order_size": [ 2, 6 ], | ||||||
|  | "passages_size": [ 0, 5 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
@@ -0,0 +1,24 @@
|  | {   | ||||||
|  |  { ais'1^\markup { \pad-markup #0.2 "+41"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2 ~ ais'8.[ a'16^\markup { \pad-markup #0.2 "-37"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 5↓" }}] ~ a'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'2 fis'2^\markup { \pad-markup #0.2 "+1"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 11↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'2. ~ fis'16[ r8.] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
@@ -0,0 +1,24 @@
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1^\markup { \pad-markup #0.2 "-46"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 5↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'2. f'4^\markup { \pad-markup #0.2 "+47"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 3↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'16[ gis'8.^\markup { \pad-markup #0.2 "-49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↑" }}] ~ gis'4 ~ gis'8.[ a'16^\markup { \pad-markup #0.2 "-10"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 1↑" }}] ~ a'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'2. ~ a'8.[ ais'16^\markup { \pad-markup #0.2 "+18"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2 ~ ais'8[ r8] r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
@@ -0,0 +1,24 @@
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  c'4^\markup { \pad-markup #0.2 "+49"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'4 ~ c'8[ r8] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
@@ -0,0 +1,24 @@
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  c4^\markup { \pad-markup #0.2 "+49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 1↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c16[ ais,8.^\markup { \pad-markup #0.2 "+18"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↑" }}] ~ ais,4 ~ ais,8.[ a,16^\markup { \pad-markup #0.2 "-10"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 13↑" }}] ~ a,4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a,4 ~ a,16[ g,8.^\markup { \pad-markup #0.2 "-2"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 11↓" }}] ~ g,2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g,1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g,1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g,4 ~ g,8[ r8] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | //(model and global vars mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
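|  | // genDurFunc: build a seeded duration generator - durations come from the envelope-shaped random table, scaled into [minDur, maxDur] and rounded to a 0.125 grid; | ||||||
|  | // when a chord is allowed, duration 0 is chosen with probability chordProb, and padding adds rrand(minPad, maxPad) | ||||||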
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
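|  | // genStepFunc: build a seeded function mapping a melodic distance to a score in [0.001, 1] via an envelope whose x-axis is normalized to [minStep, maxStep] | ||||||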
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
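|  | // genOrders: for each sub-motif, split the voices into sustaining, progressing, and silent groups, then expand the progressing group into an ordered list of moves | ||||||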
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
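|  | // updateVoices: move voice ins to a candidate one tuple away from a sustained voice, weighted by step, novelty, range, spacing, and harmonic-distance scores (passagesWeights) | ||||||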
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
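|  | // genSubMotif: realize one order as a list of [voices, duration] events - silent voices rest, sustained voices hold, progressing voices move via updateVoices; the last order is padded out to silence | ||||||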
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
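|  | // genMotif: run genSubMotif over every order, sanity-check the result, and pad the final duration so the motif ends on a whole (4-beat) measure | ||||||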
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); // round(4) rounded down, so pad up to the next measure instead | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
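|  | // genSecondarySeq: clump the sequence into groups of 1-3 motifs, append a short rest to each clump, and repeat clumps according to the repeat weights | ||||||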
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
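|  | // genPatterns: expand the [voices, duration] sequence into per-voice Pbinds (string_model plus sine doubling); unless oneShot, an \osc Pbind reports each section start to the client via /playing | ||||||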
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
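|  | // seedFunc: wrap func in a Routine with its own random seed so successive calls form a reproducible random stream | ||||||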
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print functions = very helpful | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
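|  | // msgInterpret: convert parseJSON output (strings and nested arrays of strings) back into SC values; arrays are compiled and interpreted, numeric strings become Integers or Floats | ||||||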
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
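|  | // writeResources: serialize the model dict to the resource file, pretty-printing each namespace to its own depth | ||||||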
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
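|  | // /generate: load a model file, rebuild the seeded duration/step/order generators, regenerate the motif sequence, write the model back to disk, and notify the client | ||||||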
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
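|  | // /commit: write the (possibly edited) model under a fresh UID, copy this code file beside it, back up and rewrite the ledger per commitType (add / insert / replace), and drop any tmp entry | ||||||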
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
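|  | // /transport: msg[1] == 0 stops playback; msg[1] == 1 cues and plays ledger entries starting at msg[2]; otherwise auditions the single model passed in msg[2] | ||||||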
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
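|  | // /transcribe_motif: render one motif to LilyPond, using the referenced model's last chord (or an all-zero chord) as the reference | ||||||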
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
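|  | // /transcribe_all: transcribe a range of ledger entries to LilyPond (range currently capped for testing) and write the per-part include files | ||||||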
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,48 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 2.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 3, -3, 0, -1, 1, 0 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 3, -3, 0, -1, 1, 0 ] ], 1.5 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 0, -1, 2, 0 ] ], 4.125 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 2, -2, 0, -1, 2, 0 ] ], 0.875 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ], [ 2, -2, 0, -1, 2, 0 ] ], 1 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 0.875 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 7.875 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 1, -1, 0, 0, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 3, -3, 0, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 3, -3, 0, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 0, -1, 2, 0 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "43b009ff", | ||||||
|  | "ref_uid": 62820081, | ||||||
|  | "order_seed": 216475, | ||||||
|  | "dur_seed": 323751, | ||||||
|  | "motifs_seed": 466146, | ||||||
|  | "entrances_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -3600, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.75 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 1, 0 ], [ 3, 2, 3 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0, 0.65, 0 ], | ||||||
|  | "order_size": [ 1, 1 ], | ||||||
|  | "passages_size": [ 1, 6 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,26 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r4 b'2.^\markup { \pad-markup #0.2 "-23"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 3↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { b'2. ~ b'8[ b'8^\markup { \pad-markup #0.2 "+30"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 11↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { b'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { b'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { b'2. ~ b'8[ r8] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,26 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r8[ gis'8^\markup { \pad-markup #0.2 "+10"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↓" }}] ~ gis'2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'2. ~ gis'8.[ r16] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,26 @@ | |||||||
|  | {   | ||||||
|  |  { fis'1^\markup { \pad-markup #0.2 "-21"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'4 ~ fis'8[ r8] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,26 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  r8[ cis'8^\markup { \pad-markup #0.2 "-19"}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'4 ~ cis'16[ r8.] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | //(model and global vars mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = 4 - durAdd}); | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
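|  | // A small worked example of the measure rounding above (assuming a 4-beat measure): if the | ||||||
|  | // summed duration fDur is 14.25, fDur.round(4) gives 16, so durAdd = 1.75 is appended to the | ||||||
|  | // final event of the last sub-motif and the motif ends on a barline; the durAdd < 0 branch is | ||||||
|  | // presumably meant to pad into the following measure instead. | ||||||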
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
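|  | // genSecondarySeq sketch: the primary seq is cut into clumps of 1-3 motifs, each clump gets a | ||||||
|  | // short rest (up to 0.5 beats) appended, and with the current wchoose weights ([1, 0]) every | ||||||
|  | // clump passes through exactly once (0.rand + 1 is always 1). | ||||||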
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
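|  | // The \osc event type above lets a Pbind fire one OSC message per event; genPatterns below | ||||||
|  | // relies on it to send "/playing" progress messages alongside the synth patterns. | ||||||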
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
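|  | // Hypothetical one-liner to audition the currently loaded seq outside the \transport handler | ||||||
|  | // further down; the address/port are placeholders for whatever client should get "/playing": | ||||||
|  | /* | ||||||
|  | ~auditionAddr = NetAddr("127.0.0.1", 9000); | ||||||
|  | player = Pfset(pattern: genPatterns.value([[seq, "tmp", 0, "tmp"]], ~auditionAddr), cleanupFunc: { | ||||||
|  | 	~auditionAddr.sendMsg("/transport", 0); | ||||||
|  | }); | ||||||
|  | player = player.play; | ||||||
|  | */ | ||||||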
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
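|  | // seedFunc usage sketch: wrapping a random-number function in its own seeded Routine pins the | ||||||
|  | // random stream, so a given seed always reproduces the same sequence of results, e.g.: | ||||||
|  | /* | ||||||
|  | ~seededRand = seedFunc.value({rrand(0, 100)}, 12345);  // hypothetical seed | ||||||
|  | ~seededRand.value; // first draw from the seeded stream | ||||||
|  | ~seededRand.value; // next draw; rebuilding with the same seed repeats the whole sequence | ||||||
|  | */ | ||||||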
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
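|  | // stringifyToDepth sketch: maxDepth controls how many array levels get their own lines before | ||||||
|  | // falling back to asCompileString, e.g. (hypothetical data): | ||||||
|  | /* | ||||||
|  | stringifyToDepth.value([[1, 2], [3, 4]], 1); | ||||||
|  | // -> "[\n  [ 1, 2 ],\n  [ 3, 4 ]\n]" (roughly; exact spacing comes from the indent func) | ||||||
|  | */ | ||||||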
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print functions = very helpful | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
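|  | // msgInterpret round-trip sketch: arrays coming back from parseJSON as nested strings are | ||||||
|  | // recompiled into sclang values, with "Rest" re-quoted so it survives .interpret, while plain | ||||||
|  | // digit strings become Integers and strings containing "." become Floats: | ||||||
|  | /* | ||||||
|  | msgInterpret.value([["Rest"], ["1", "0"]]);  // -> [ [ "Rest" ], [ 1, 0 ] ]  (hypothetical input) | ||||||
|  | msgInterpret.value("358555");                // -> 358555 | ||||||
|  | */ | ||||||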
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
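|  | // Hypothetical way to trigger the handler above from sclang (the UI normally sends this); the | ||||||
|  | // model path below is a placeholder built from the tmp conventions used elsewhere in this file: | ||||||
|  | /* | ||||||
|  | NetAddr("127.0.0.1", NetAddr.langPort).sendMsg("/generate", | ||||||
|  | 	(resourceDir +/+ "tmp" +/+ "tmp_mus_model.json").standardizePath); | ||||||
|  | */ | ||||||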
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
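|  | // Hypothetical transport messages matching the handler above: | ||||||
|  | /* | ||||||
|  | NetAddr("127.0.0.1", NetAddr.langPort).sendMsg("/transport", 1, 0); // play from ledger index 0 | ||||||
|  | NetAddr("127.0.0.1", NetAddr.langPort).sendMsg("/transport", 0);    // stop and release voices | ||||||
|  | */ | ||||||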
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,54 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 1 ] ], 1.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 0 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 0 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 0.625 ], | ||||||
|  |     [ [ [ 0, -1, 0, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 2.125 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ "Rest" ], [ "Rest" ] ], 0.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 6.5 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 0, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "443ec222", | ||||||
|  | "ref_uid": "6d635e88", | ||||||
|  | "order_seed": 516056, | ||||||
|  | "dur_seed": 358555, | ||||||
|  | "motifs_seed": 962315, | ||||||
|  | "entrances_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0.29, 0, 1.1111111111111, 0.65934065934066, 1.37, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2411.1455108359, -850.77399380805 ], [ -1872, 450 ], [ -479, 1304.0247678019 ], [ -368, 1173.9938080495 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.33950617283951, 0, 0.72839506172839, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.35, 0.42, 0.75, 0.9, 0.93 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 3 ], [ 2 ], [ 0, 1 ] ], | ||||||
|  |   [ [ 1 ], [ 2, 0, 2, 0, 0 ], [ 3 ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.41, 0, 0 ], | ||||||
|  | "order_size": [ 2, 6 ], | ||||||
|  | "passages_size": [ 0, 5 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { ais'1^\markup { \pad-markup #0.2 "+41"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2 ~ ais'8[ r8] r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  r8[ d'8^\markup { \pad-markup #0.2 "+0"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 13↓" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d'4 ~ d'8[ e'8^\markup { \pad-markup #0.2 "+36"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 5↑" }}] ~ e'4 f'4^\markup { \pad-markup #0.2 "+47"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'16[ r8.] r2.  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  r8[ c'8^\markup { \pad-markup #0.2 "+49"}] ~ c'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'4 ~ c'8[ r8] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r4 r8[ c8^\markup { \pad-markup #0.2 "+49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 1↑" }}] ~ c2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c8[ ais,8^\markup { \pad-markup #0.2 "+18"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↑" }}] ~ ais,8.[ a,16^\markup { \pad-markup #0.2 "-37"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 5↓" }}] ~ a,2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a,2 r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,943 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // (model and global vars are mostly set by OSC funcs) | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = 4 - durAdd}); | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print functions = very helpful | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
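|  | // example (assumed input, as returned by parseJSON): arrays of strings are compiled | ||||||
|  | // back into nested SC arrays, e.g. [["0", "-1"], ["Rest"]] -> [[0, -1], ["Rest"]]; | ||||||
|  | // bare numeric strings such as "42" or "0.5" come back as Integer/Float | ||||||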
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
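|  | // writes the dict as a JSON string keyed by nameSpaces: music_data is expanded three | ||||||
|  | // levels deep, last_changes and order one level, everything else on a single line -- | ||||||
|  | // the same shape as the <uid>_mus_model.json files stored in the resource directory | ||||||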
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	File.mkdir(resourceDir); | ||||||
|  | 	ledger.do({arg id; | ||||||
|  | 		File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
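|  | // nameSpaces must stay in the same order as the modelItems arrays in writeResources and | ||||||
|  | // globalVarsToDict and as the destructuring assignment in setGlobalVars -- the JSON keys | ||||||
|  | // map onto the globals purely by position | ||||||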
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
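|  | // example client call (hypothetical address; assumes sclang's default port 57120): | ||||||
|  | // NetAddr("127.0.0.1", 57120).sendMsg("/load_model", "/path/to/<uid>_mus_model.json"); | ||||||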
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
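|  | // example messages (hypothetical client): "/transport" 1 <ledgerIndex> cues playback from | ||||||
|  | // that ledger entry; "/transport" 0 releases the synth group and stops the player, e.g. | ||||||
|  | // NetAddr("127.0.0.1", 57120).sendMsg("/transport", 1, 0); | ||||||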
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,44 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, -1, 0, -1, 1, -1 ], [ "Rest" ] ], 0.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ 3, -1, 0, -2, 1, -1 ] ], 1.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ 3, -1, 0, -1, 1, -2 ] ], 11 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 3, -1, 0, -1, 1, -2 ] ], 1.375 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 3, -1, 0, -1, 1, -2 ] ], 1.5 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 6.25 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ 1, 0, 0, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ 3, -1, 0, -2, 1, -1 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ 3, -1, 0, -1, 1, -2 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "4526b76b", | ||||||
|  | "ref_uid": "6fb60ab6", | ||||||
|  | "order_seed": 785868, | ||||||
|  | "dur_seed": 994115, | ||||||
|  | "motifs_seed": 805843, | ||||||
|  | "entrances_probs_vals": [ 0.34, 0, 10, 0.5, 2, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0.34, 0, 10, 0.5, 2.12, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0.34, 0, 10, 0.5, 2.12, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -3600, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.75 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 2, 1 ], [ 3, 3 ], [ 0 ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.75, 0.25, 0.25 ], | ||||||
|  | "order_size": [ 1, 4 ], | ||||||
|  | "passages_size": [ 0, 3 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | //(model and global vars mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
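|  | // e.g. with primes as defined below, [1, 0, 0, 0, 0, 0] -> 2.0 (an octave above the base) | ||||||
|  | // and [0, 1, 0, 0, 0, 0] -> 1.5 (a just fifth); exponents index the prime ratios per dimension | ||||||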
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
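|  | // candidate step vocabulary: every move that leaves the octave dimension at 0 and shifts | ||||||
|  | // at most one other prime by +/-1, plus copies of those moves transposed by -3..3 octaves | ||||||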
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
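|  | // returns a seeded duration generator: values are drawn from the env-shaped random table, | ||||||
|  | // scaled to [minDur, maxDur] and quantized to 1/8 beat; when allowChord is true there is a | ||||||
|  | // chordProb chance of a 0 duration (a chord), and pad adds rrand(minPad, maxPad) afterwards | ||||||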
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
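|  | // each generated order is [sus, prog, silent]: which voice indices sustain, which progress | ||||||
|  | // (with possible repeats up to maxProgLength), and which stay silent for that sub-motif | ||||||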
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
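|  | // candidates are the tuple steps around the currently sounding voices; each is scored for | ||||||
|  | // step size, recency, range, spacing, and harmonic distance, the scores are weighted by | ||||||
|  | // passagesWeights and multiplied, and the new pitch is drawn with wchoose | ||||||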
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = 4 - durAdd}); | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
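|  | // per voice this builds a \string_model / \sine Pbind pair routed by \busIndex, and (unless | ||||||
|  | // oneShot) appends an \osc-type Pbind that reports section progress to the client via "/playing" | ||||||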
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	// print statements here are very helpful for debugging | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
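|  | // note on the \commit responder above: msg[1] is the model as a JSON string, | ||||||
|  | // msg[2] the commit type ("add", "insert" or "replace"), msg[3] the ledger | ||||||
|  | // position that insert/replace operate on. A new UID is generated, the running | ||||||
|  | // code file and the model JSON are copied into resourceDir/UID/ (which is why | ||||||
|  | // each committed UID folder carries a copy of this script), the ledger is | ||||||
|  | // backed up to <ledgerPath>_bak, any "tmp" entry is dropped, and the updated | ||||||
|  | // ledger is written back before "/committed" is sent to the client. | ||||||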
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
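|  | // note on the \transport responder above: msg[1] == 0 fades out the synth group | ||||||
|  | // and stops the player; msg[1] == 1 auditions the ledger starting at index | ||||||
|  | // msg[2] (plus the cued "tmp" model when one is loaded); any other value treats | ||||||
|  | // msg[2] as a model JSON string and plays it once as a one-shot. | ||||||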
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
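|  | // note on the \transcribe_motif responder above: msg[1] is a model JSON string | ||||||
|  | // and msg[2] a reference UID; the reference chord is read from that model's | ||||||
|  | // last_changes (or an all-zero chord when the UID is "nil"/"tmp") and handed to | ||||||
|  | // ~transcribe, which is expected to be defined elsewhere in the project. | ||||||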
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if((indexStart + index) != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
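|  | // note on the \transcribe_all responder above: it walks a slice of the ledger, | ||||||
|  | // re-deriving each model's reference chord from its ref_uid, writes LilyPond | ||||||
|  | // output into each UID's resource folder via ~transcribe, and rebuilds the four | ||||||
|  | // part_*.ly include files; only the final call passes addr, so the client is | ||||||
|  | // notified once the batch is done. | ||||||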
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,48 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 3.625 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 3, -2, -1, -1, 1, 0 ] ], 1.875 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 3, -2, -1, -1, 1, 0 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 3, -1, 0, -2, 1, 0 ] ], 3.375 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ 2, -1, -1, -1, 1, 0 ], [ 3, -1, 0, -2, 1, 0 ] ], 1.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 2, -1, -1, -1, 1, 0 ], [ 3, -1, 0, -2, 1, 0 ] ], 0.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 3, -1, 0, -2, 1, 0 ] ], 1.125 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 4.25 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ], [ 3, -2, 0, -1, 0, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ], [ 2, -1, 0, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ], [ 3, -2, -1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 3, -2, -1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 3, -1, 0, -2, 1, 0 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "4b7745df", | ||||||
|  | "ref_uid": "7d3c9a80", | ||||||
|  | "order_seed": 216475, | ||||||
|  | "dur_seed": 241788, | ||||||
|  | "motifs_seed": 440693, | ||||||
|  | "entrances_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -3600, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.75 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 1, 0 ], [ 3, 2, 3 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0, 0.65, 0 ], | ||||||
|  | "order_size": [ 1, 2 ], | ||||||
|  | "passages_size": [ 1, 6 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  r16[ d''8.^\markup { \pad-markup #0.2 "-8"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 5↓" }}] ~ d''4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d''1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d''4 ~ d''8[ dis''8^\markup { \pad-markup #0.2 "+12"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 7↓" }}] ~ dis''2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis''1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis''1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis''2. ~ dis''8[ r8] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  a'2^\markup { \pad-markup #0.2 "-6"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 5↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'4 ~ a'16[ r8.] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { fis'1^\markup { \pad-markup #0.2 "-21"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'16[ r8.] r2.  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  cis'4^\markup { \pad-markup #0.2 "-19"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'2. ~ cis'8.[ r16] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // model and global vars mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
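|  | // genTuples: enumerates the allowed harmonic-space moves; every step that | ||||||
|  | // changes at most one non-octave prime dimension by +/-1 (octave fixed at 0), | ||||||
|  | // plus a copy of each such step at octave offsets -3..3. | ||||||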
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
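|  | // genDurFunc: builds a seeded duration generator; envData becomes a random | ||||||
|  | // table that is sampled and scaled into [minDur, maxDur], chords (dur 0) occur | ||||||
|  | // with probability chordProb when allowed, optional padding of | ||||||
|  | // rrand(minPad, maxPad) is added, and results are quantized to 1/8. | ||||||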
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
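|  | // genOrders: for each sub-motif, randomly partitions the voice indices into | ||||||
|  | // sustaining, progressing and silent groups (the sustain count weighted by | ||||||
|  | // susWeights), then extends the progression with extra repeated entries; | ||||||
|  | // returns an array of [sus, prog, silent] index lists. | ||||||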
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
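|  | // updateVoices: moves voice `ins` to a new harmonic-space point; candidates are | ||||||
|  | // the sustained voices offset by every tuple (minus points already sounding), | ||||||
|  | // each scored by step size, recency, range, spacing and harmonic distance, the | ||||||
|  | // scores weighted by passagesWeights and multiplied, then one candidate is | ||||||
|  | // drawn by weighted choice; lastXChanges keeps the last five chord states. | ||||||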
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
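|  | // genSubMotif: renders one [sus, prog, silent] order into [chord, dur] events; | ||||||
|  | // silent voices become ["Rest"], progressing voices move via updateVoices, | ||||||
|  | // durations come from entrancesDurFunc for sustained entries of the first order | ||||||
|  | // and passagesDurFunc otherwise, a duration of 0 groups simultaneous attacks | ||||||
|  | // into a chord, and the final order is closed out with rests timed by | ||||||
|  | // exitsDurFunc. | ||||||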
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = 4 + durAdd}); // if rounding went down, pad forward to the next full measure | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
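|  | // custom \osc event type: lets a Pbind send an OSC message (~path, *~msg) to | ||||||
|  | // ~addr on each event; used below to report playback progress to the client. | ||||||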
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
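|  | // genPatterns: flattens the sequence into per-voice streams, merges repeated | ||||||
|  | // chords into held notes, converts harmonic-space arrays to frequencies | ||||||
|  | // (60.midicps * ratio), shapes attacks and releases from the durations and rest | ||||||
|  | // context, and returns a Ppar of \string_model and \sine Pbinds per voice; | ||||||
|  | // unless oneShot is set, an \osc Pbind is added that sends "/playing" with the | ||||||
|  | // current section's metadata as each section starts. | ||||||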
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
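|  | // seedFunc: wraps a function in a Routine with a fixed randSeed so that, given | ||||||
|  | // the same seed, repeated calls reproduce the same pseudo-random choices. | ||||||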
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print functions = very helpful | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
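|  | // msgInterpret: converts a value coming from parsed JSON back into sclang data; | ||||||
|  | // array-like values are round-tripped through asCompileString/interpret (with | ||||||
|  | // "Rest" re-quoted), while plain strings are coerced to Integer or Float where | ||||||
|  | // they look numeric. | ||||||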
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
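|  | // writeResources: serializes the model dict to hand-rolled JSON in the fixed | ||||||
|  | // nameSpaces order, pretty-printing music_data, last_changes and order to a | ||||||
|  | // limited depth via stringifyToDepth, writes it to path and returns the string. | ||||||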
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 		file.close; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if((indexStart + index) != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,52 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 1 ] ], 1.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, 0, 0, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, 0, 0, -2, 1, 1 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 0, -2, 1, 1 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 3, 0 ], [ "Rest" ] ], 1.375 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ "Rest" ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 0.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 3.375 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 0, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 3, 0 ], [ 1, 0, 1, -2, 1, 1 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "4e7d35e5", | ||||||
|  | "ref_uid": "6d635e88", | ||||||
|  | "order_seed": 516056, | ||||||
|  | "dur_seed": 358555, | ||||||
|  | "motifs_seed": 595740, | ||||||
|  | "entrances_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0.29, 0, 1.1111111111111, 0.65934065934066, 1.37, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2411.1455108359, -850.77399380805 ], [ -1872, 450 ], [ -479, 1304.0247678019 ], [ -368, 1173.9938080495 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.33950617283951, 0, 0.72839506172839, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.35, 0.42, 0.75, 0.9, 0.93 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 3 ], [ 2 ], [ 0, 1 ] ], | ||||||
|  |   [ [ 1 ], [ 2, 0, 2 ], [ 3 ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.41, 0, 0 ], | ||||||
|  | "order_size": [ 2, 6 ], | ||||||
|  | "passages_size": [ 0, 5 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { ais'1^\markup { \pad-markup #0.2 "+41"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2 ~ ais'8[ r8] r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  r8[ g'8^\markup { \pad-markup #0.2 "-46"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 5↓" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'4 ~ g'8[ gis'8^\markup { \pad-markup #0.2 "-49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 3↑" }}] ~ gis'4 fis'4^\markup { \pad-markup #0.2 "+1"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 11↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'4 ~ fis'8.[ r16] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  r8[ c'8^\markup { \pad-markup #0.2 "+49"}] ~ c'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r4 r8[ c8^\markup { \pad-markup #0.2 "+49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 1↑" }}] ~ c2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c4 ~ c16[ r8.] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // model and global vars (mostly set by OSC funcs) | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
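|  | // note: each pitch is an array of exponents over the ratios in `primes`; with | ||||||
|  | // primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]], the array [1, 0, 1, 0, 0, 0] | ||||||
|  | // denotes (2/1)^1 * (5/4)^1 = 5/2, which hsArrayToFreq returns as a ratio and | ||||||
|  | // hsArrayToCents as about 1586 cents (illustrative values, not taken from the model files) | ||||||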
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
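|  | // note: tuples enumerates the candidate voice-leading moves, i.e. a step of at most one | ||||||
|  | // unit along a single non-octave prime dimension, at octave transpositions from -3 to 3 | ||||||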
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
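|  | // note: genDurFunc returns a seeded duration generator; envData is read as an envelope and | ||||||
|  | // turned into a random table, samples are scaled into [minDur, maxDur] and quantized to | ||||||
|  | // multiples of 0.125, and a result of 0 (probability chordProb when allowChord is true) is | ||||||
|  | // later treated as a chord stack by genSubMotif. Illustrative call (hypothetical values): | ||||||
|  | // f = genDurFunc.value(0.2, 0, 1, 0.25, 2, [0.5, 1], 12345); f.value(true, false); | ||||||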
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
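|  | // note: genStepFunc builds the melodic-step weighting used by intervalScore; the envelope's | ||||||
|  | // x-axis is rescaled to the [minStep, maxStep] range in cents, and evaluating it at a pitch | ||||||
|  | // distance yields a score clipped to [0.001, 1] | ||||||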
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
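|  | // note: each generated order is a triple of voice-index lists [sustaining, progressing, silent]; | ||||||
|  | // sustaining voices hold their pitch, progressing voices receive new pitches (indices may | ||||||
|  | // recur, giving several moves), and silent voices rest for that sub-motif | ||||||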
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
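|  | // note: updateVoices moves one progressing voice; candidates are the sustained voices' pitches | ||||||
|  | // offset by every tuple, each is scored for step size, novelty, register, spacing and harmonic | ||||||
|  | // distance, the scores are combined under passagesWeights, and the new pitch is drawn with wchoose | ||||||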
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
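|  | // note: genSubMotif walks the flattened order (silent, sustaining, then progressing voices) and | ||||||
|  | // emits a [chord-state, duration] pair whenever a voice actually changes; zero durations merge | ||||||
|  | // consecutive changes into a chord, and the final order is closed out by moving the remaining | ||||||
|  | // sounding voices to rests with exit durations | ||||||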
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//pad the last duration so the motif ends on a 4-beat measure boundary | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd = fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); //if rounding went down, pad up to the next measure instead | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
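|  | // note: genPatterns turns a motif sequence into parallel Pbinds; each voice's consecutive | ||||||
|  | // identical chord entries are clumped into single notes, rests become Rest(0) frequencies, | ||||||
|  | // and unless oneShot is set an extra \osc Pbind reports section positions back to the sender | ||||||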
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
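|  | // note: seedFunc wraps a function in a Routine with its own randSeed, so each generator above | ||||||
|  | // replays an identical pseudo-random stream for a given seed. Illustrative usage (hypothetical | ||||||
|  | // values): f = seedFunc.value({arg max; max.rand}, 12345); f.value(10); // reproducible result | ||||||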
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
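|  | // note: stringifyToDepth pretty-prints nested arrays as JSON-like text, breaking arrays onto | ||||||
|  | // their own lines down to maxDepth and writing anything deeper inline with asCompileString | ||||||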
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print statements here are very helpful for debugging | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
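|  | // note: msgInterpret is the inverse step for values read back from JSON/OSC; array-like values | ||||||
|  | // are re-interpreted as SC arrays (keeping "Rest" as a string), and plain strings are coerced | ||||||
|  | // to Integer or Float where they look numeric | ||||||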
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,55 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 1 ] ], 2 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 0, 0, 1, -1, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 0, 0, 1, -1, 1, 1 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 0, 1, -1, 1, 1 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 0.625 ], | ||||||
|  |     [ [ [ 0, -1, 0, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 1.25 ], | ||||||
|  |     [ [ [ 0, -1, 0, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 2, -1 ], [ "Rest" ] ], 2.25 ], | ||||||
|  |     [ [ [ 0, -1, 0, -2, 2, 0 ], [ "Rest" ], [ 2, -1, 1, -2, 2, -1 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ 0, -1, 0, -2, 2, 0 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 0.875 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 3.0 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 0, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 0, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 2, -1 ], [ 1, 0, 1, -2, 1, 1 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "52c9a980", | ||||||
|  | "ref_uid": "6d635e88", | ||||||
|  | "order_seed": 516056, | ||||||
|  | "dur_seed": 358555, | ||||||
|  | "motifs_seed": 210544, | ||||||
|  | "entrances_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.82, 2.0054945054945, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0.29, 0, 1.1111111111111, 0.65934065934066, 1.37, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.82, 2.0054945054945, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2411.1455108359, -850.77399380805 ], [ -1872, 450 ], [ -479, 1304.0247678019 ], [ -368, 1173.9938080495 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.33950617283951, 0, 0.72839506172839, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.35, 0.42, 0.75, 0.9, 0.93 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 3 ], [ 2 ], [ 0, 1 ] ], | ||||||
|  |   [ [ 1 ], [ 2, 0, 2, 0, 0, 2 ], [ 3 ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.41, 0, 0 ], | ||||||
|  | "order_size": [ 2, 6 ], | ||||||
|  | "passages_size": [ 0, 5 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { ais'1^\markup { \pad-markup #0.2 "+41"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2. r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1^\markup { \pad-markup #0.2 "+9"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 7↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'2 g'4^\markup { \pad-markup #0.2 "-2"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 11↓" }} ~ g'8[ f'8^\markup { \pad-markup #0.2 "+47"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↓" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'8.[ e'16^\markup { \pad-markup #0.2 "+9"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 13↓" }}] ~ e'2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { e'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { e'16[ r8.] r2.  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  c'4^\markup { \pad-markup #0.2 "+49"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'4 ~ c'16[ r8.] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,20 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  c2^\markup { \pad-markup #0.2 "+49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 1↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c4 ais,4^\markup { \pad-markup #0.2 "+18"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↑" }} ~ ais,16[ a,8.^\markup { \pad-markup #0.2 "-37"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 5↓" }}] ~ a,4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a,1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a,1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a,2 r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // model and global vars (mostly set by OSC funcs) | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
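|  | // genTuples enumerates the allowed single moves in harmonic space: every vector whose | ||||||
|  | // octave component is 0 and whose remaining entries contain at most one +/-1 step, | ||||||
|  | // plus copies of those vectors transposed by -3..3 octaves. With the six primes | ||||||
|  | // defined below this yields 11 base steps and 66 octave-shifted variants (77 moves). | ||||||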
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
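|  | // genDurFunc builds a seeded duration generator: envData describes a probability | ||||||
|  | // envelope that is turned into a random table, and the returned function maps table | ||||||
|  | // draws into [minDur, maxDur] rounded to 1/8 beats, returning 0 (a chord attack) | ||||||
|  | // with probability chordProb when chords are allowed, plus optional padding. | ||||||
|  | // Illustrative call (argument values are made up, not taken from the piece): | ||||||
|  | // f = genDurFunc.value(0.5, 0, 1, 0.5, 4, [0.5, 1], 12345); f.value(true, false); | ||||||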
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
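|  | // Each generated "order" is a triple [sus, prog, silent]: which voices sustain, which | ||||||
|  | // ones step through the progression (possibly revisited up to maxProgLength extra | ||||||
|  | // times), and which stay silent for that sub-motif. | ||||||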
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
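|  | // updateVoices re-pitches one voice (ins): candidate pitches are every genTuples step | ||||||
|  | // away from the currently sustaining voices, minus pitches already in the chord. | ||||||
|  | // Each candidate is scored on melodic step size, recency in lastXChanges, register | ||||||
|  | // range, chord spacing, and harmonic distance (hdSum); the weighted, normalised | ||||||
|  | // scores are multiplied and a candidate is drawn with wchoose. The new voicing is | ||||||
|  | // appended to lastXChanges, which keeps only the last five chords. | ||||||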
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
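|  | // genSubMotif realises one order: voices are visited silent, then sustaining, then | ||||||
|  | // progressing, and a new [chord, duration] pair is appended whenever a voice actually | ||||||
|  | // changes. Durations come from entrancesDurFunc for the sustains of the first order | ||||||
|  | // and from passagesDurFunc otherwise; a duration of 0 marks notes struck together as | ||||||
|  | // a chord. The final order is closed by resting the voices one by one via exitsDurFunc. | ||||||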
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); // rounded down: pad up to the next 4-beat measure | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
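|  | // genMotif chains one sub-motif per order (each starting from the previous ending | ||||||
|  | // chord) and then lengthens the very last duration so the whole motif ends on a | ||||||
|  | // 4-beat measure boundary. | ||||||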
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
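|  | // genSecondarySeq groups the motif sequence into clumps of 1-3 motifs, appends a | ||||||
|  | // short rest after each clump, and wraps each clump in a repeat count. As written, | ||||||
|  | // [0.rand + 1, 1].wchoose([1, 0].normalizeSum) always evaluates to 1, so repeats are | ||||||
|  | // effectively disabled. | ||||||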
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
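|  | // genPatterns turns the nested [chord, dur] data into Pbinds: per voice, repeated | ||||||
|  | // chords are clumped into single notes, rests become Rest events, and each voice is | ||||||
|  | // rendered by a \string_model and a \sine Pbind (both SynthDefs are assumed to be | ||||||
|  | // defined elsewhere in the project). Unless oneShot is set, an extra \osc Pbind sends | ||||||
|  | // "/playing" messages with each section's metadata and duration so the GUI can follow | ||||||
|  | // the transport. | ||||||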
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 					mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
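|  | // seedFunc wraps a function in a Routine with its own random seed, so the stream of | ||||||
|  | // calls replays the same pseudo-random choices for a given seed. Illustrative use | ||||||
|  | // (values not from the piece): f = seedFunc.value({rrand(0, 9)}, 123); f.value; f.value; | ||||||
|  | // rebuilding f with seed 123 reproduces the same sequence of numbers. | ||||||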
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
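|  | // stringifyToDepth pretty-prints nested arrays as indented, JSON-like text, recursing | ||||||
|  | // maxDepth levels before falling back to asCompileString for the innermost arrays. | ||||||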
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print functions = very helpful | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
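|  | // msgInterpret converts values arriving over OSC/JSON back into SuperCollider data: | ||||||
|  | // array-like input is compiled back into nested arrays (with "Rest" re-quoted so it | ||||||
|  | // survives interpret), plain strings are coerced to Integer or Float when they look | ||||||
|  | // numeric, and everything else passes through unchanged. | ||||||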
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
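|  | // writeResources serialises a model dict to JSON-like text, one nameSpaces key per | ||||||
|  | // line, using a deeper stringify depth for "music_data", "last_changes" and "order". | ||||||
|  | // Note that the modelItems list built here is not actually used; the written values | ||||||
|  | // come from the dict argument (normally produced by globalVarsToDict). | ||||||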
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
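|  | // nameSpaces is the canonical key order for every model file: it must line up | ||||||
|  | // one-to-one with the destructuring in setGlobalVars and the modelItems list in | ||||||
|  | // globalVarsToDict, which is why the ordering matters. | ||||||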
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
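|  | // /generate: load the model file named in the message, restore the globals, seed | ||||||
|  | // lastXChanges from the referenced model (or re-initialise it when there is no | ||||||
|  | // reference), rebuild the seeded duration/step functions and, if absent, the orders, | ||||||
|  | // run genMotif, write the model back to the same path and reply with "/generated". | ||||||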
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
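|  | // /commit: mint a new UID, snapshot this .scd file and the (possibly GUI-edited) | ||||||
|  | // model into resourceDir/<uid>/, update the ledger (add, insert or replace, then | ||||||
|  | // drop any "tmp" entry), back up and rewrite the ledger file, and reply "/committed" | ||||||
|  | // with the new UID. | ||||||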
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
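|  | // /transport: msg[1] == 0 fades and stops the current player; msg[1] == 1 cues the | ||||||
|  | // ledger from index msg[2] to the end (plus the uncommitted "tmp" model when one is | ||||||
|  | // cued) and plays it; any other value auditions the single model passed in msg[2] as | ||||||
|  | // a one-shot. The cleanupFunc reports transport state back to the sender (addr). | ||||||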
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
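|  | // /transcribe_all: walks the ledger from msg[1] (currently truncated to indexStart+5 | ||||||
|  | // for testing), reloads each model and its reference chord, calls ~transcribe | ||||||
|  | // (assumed to be defined elsewhere in the project) to write per-UID lilypond output, | ||||||
|  | // and collects \include lines for parts I-IV into the shared includes directory. | ||||||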
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,48 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 4.375 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 1, -1, 0, 0, 1, 0 ] ], 1.625 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 1, -1, 0, 0, 1, 0 ] ], 1.25 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 3.375 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 2, -1, -1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 2, -1, -1, -1, 1, 0 ], [ "Rest" ] ], 0.875 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 10.0 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 1, -1, 0, 0, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 1, -1, 0, 0, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "62820081", | ||||||
|  | "ref_uid": "79e0a4a7", | ||||||
|  | "order_seed": 216475, | ||||||
|  | "dur_seed": 914627, | ||||||
|  | "motifs_seed": 252655, | ||||||
|  | "entrances_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -3600, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.75 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 1, 0 ], [ 3, 2, 3 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0, 0.65, 0 ], | ||||||
|  | "order_size": [ 1, 1 ], | ||||||
|  | "passages_size": [ 1, 6 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,943 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | //(model and global vars mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); // rounded down: pad up to the next 4-beat measure | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
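|  | // custom \osc event type: if the event supplies ~addr, send ~msg to ~path at the event's logical time | ||||||
|  | // (used by genPatterns below to emit /playing updates while a sequence runs). | ||||||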
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
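|  | // genPatterns: turns one or more section scores into parallel Pbinds - a \string_model and a \sine stream per voice, | ||||||
|  | // clumping repeated chord states into single sustained notes; unless oneShot is true, an extra \osc Pbind | ||||||
|  | // reports /playing at each section boundary. | ||||||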
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); // 'm' was undefined; use the MIDIOut created above | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
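|  | // seedFunc: wraps func in a Routine with its own random seed so successive calls replay a fixed | ||||||
|  | // pseudo-random sequence, independent of the global RNG state. | ||||||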
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
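|  | // stringifyToDepth: renders nested arrays as JSON-ish text, opening brackets on new lines down to maxDepth | ||||||
|  | // and falling back to asCompileString for anything deeper. | ||||||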
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	// print statements - very helpful for checking motif continuity | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
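|  | // msgInterpret: coerces an incoming OSC/JSON value back into SC data - arrays are re-interpreted from their | ||||||
|  | // compile string (with "Rest" re-quoted), purely numeric strings become Integers or Floats, anything else passes through. | ||||||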
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
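|  | // writeResources: serializes the model dictionary to a JSON file at path, using per-namespace depths so that | ||||||
|  | // music_data, last_changes and order stay line-broken and readable; returns the written string. | ||||||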
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
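|  | // saveLedger: writes the ledger (list of committed piece UIDs) to path as JSON and copies each UID's | ||||||
|  | // resource folder into the ledger's resource directory. | ||||||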
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	File.mkdir(resourceDir); | ||||||
|  | 	ledger.do({arg id; | ||||||
|  | 		File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
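|  | // /generate: loads the model at msg[1], reseeds the duration/step/order generators from the stored seeds, | ||||||
|  | // regenerates seq with genMotif, writes the model back to disk, and replies with /generated. | ||||||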
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
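|  | // /commit: mints a new UID, snapshots this code file and the current model into the resource directory, | ||||||
|  | // updates the ledger (add / insert / replace, dropping any "tmp" entry), saves it, and replies with /committed. | ||||||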
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
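|  | // /transport: 0 stops playback (releasing the synth group); otherwise the cued section models are read from disk | ||||||
|  | // (or a one-shot model is taken straight from the message), turned into patterns with genPatterns, and played, | ||||||
|  | // with /transport and /one_shot reset messages sent when the player finishes. | ||||||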
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
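|  | // /transcribe_all: walks the ledger from the requested index, reloading each model and its reference chord, | ||||||
|  | // calls ~transcribe to render LilyPond for every entry, and writes the per-part include files. | ||||||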
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,48 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 4.375 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 1, -1, 0, 0, 1, 0 ] ], 1.375 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 2, 0 ], [ 1, -1, 0, 0, 1, 0 ] ], 1.25 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 2, 0 ], [ 2, -2, 0, -1, 2, 0 ] ], 2.375 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 2, 0 ], [ "Rest" ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 5.0 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 1, -1, 0, 0, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 2, 0 ], [ 1, -1, 0, 0, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 2, 0 ], [ 2, -2, 0, -1, 2, 0 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "631e2af1", | ||||||
|  | "ref_uid": "79e0a4a7", | ||||||
|  | "order_seed": 216475, | ||||||
|  | "dur_seed": 698877, | ||||||
|  | "motifs_seed": 336611, | ||||||
|  | "entrances_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -3600, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.75 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 1, 0 ], [ 3, 2, 3 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0, 0.65, 0 ], | ||||||
|  | "order_size": [ 1, 1 ], | ||||||
|  | "passages_size": [ 1, 6 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | //(model and global vars mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
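|  | // genTuples: enumerates the harmonic-space moves a voice may make - all -1/0/1 combinations over the prime | ||||||
|  | // dimensions with at most one non-octave step, then duplicated across octave transpositions of -3..3. | ||||||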
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
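|  | // genDurFunc: returns a seeded duration generator - values are drawn from the envelope (as a random table) | ||||||
|  | // between minDur and maxDur, quantized to eighths, zeroed with probability chordProb when a chord is allowed, | ||||||
|  | // and padded by minPad..maxPad when pad is set. | ||||||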
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
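|  | // genStepFunc: returns a seeded scoring function mapping a pitch distance through the envelope, | ||||||
|  | // rescaled to minStep..maxStep and clipped to 0.001..1. | ||||||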
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
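|  | // updateVoices: moves one voice - candidates are the sustained voices offset by the allowed tuples, scored for | ||||||
|  | // step size, recency, range, spacing and harmonic distance, weighted by passagesWeights, and chosen by weighted | ||||||
|  | // random selection; the new chord is appended to lastXChanges (keeping the last five states). | ||||||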
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
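|  | // genSubMotif: realizes one order ([sustain, progression, silent] voice groups) as a list of [voices, dur] states - | ||||||
|  | // silent voices rest, progressing voices step via updateVoices, durations come from the entrance/passage | ||||||
|  | // generators, and the final order is faded out voice by voice using the exit durations. | ||||||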
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); // rounded down: pad up to the next whole measure | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); // 'm' was undefined; use the MIDIOut created above | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	// print statements - very helpful for checking motif continuity | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! (these keys must line up with the destructuring in setGlobalVars and the modelItems arrays) | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
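|  | 	// seed the harmonic-space state: start from fresh voices, or pick up the last_changes of the referenced model | ||||||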
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
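|  | 	// derive three sub-seeds from durSeed and build the entrance, passage, and exit duration generators | ||||||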
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
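|  | 	// build the step-probability function, then generate the motif deterministically from motifSeed | ||||||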
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
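|  | 	// mint a fresh UID for this commit and snapshot the running code file into its resource folder | ||||||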
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
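|  | 		// msg[1] == 1: play the ledger from index msg[2]; otherwise audition the single model passed in msg[2] | ||||||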
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if((indexStart + index) != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,95 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ "Rest" ], [ 2, -1, 1, -2, 1, 0 ], [ "Rest" ] ], 3.125 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 1, -2, 0, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ "Rest" ] ], 3.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 3.25 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 1, -2, 1, -1, 1, 1 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.375 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 1, -1, 1, -1, 1, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 2.75 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 1, -2, 1, -1 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.875 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 1, 0, 1, -2, 1, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 2.875 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 1, 0, 1, -2, 1, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 2 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -2, 1, -1, 2, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 3.125 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.25 ], | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ "Rest" ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 3.875 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -2, 1, -1, 2, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.875 ], | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -2, 1, -1, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 2 ], | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -2, 1, -1, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 2.75 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -2, 1, -1, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -1, 1, 0 ] ], 1.875 ], | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -2, 1, -1, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 2.875 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 2, -2, 0, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.25 ], | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], 3.25 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.125 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 1, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 2 ], | ||||||
|  |     [ [ [ -1, 1, 1, -2, 1, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 4 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ -1, 0, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.875 ], | ||||||
|  |     [ [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 2.25 ], | ||||||
|  |     [ [ [ 1, -1, 1, -3, 2, 0 ], [ "Rest" ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 1, -3, 2, 0 ], [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 1 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 1, -3, 2, 0 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 4.0 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 0, 0, 1, -2, 1, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 1, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ -1, 1, 1, -2, 1, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ -1, 0, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 0, 1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "6d635e88", | ||||||
|  | "ref_uid": "6ed95c4c", | ||||||
|  | "order_seed": 526896, | ||||||
|  | "dur_seed": 815251, | ||||||
|  | "motifs_seed": 342685, | ||||||
|  | "entrances_probs_vals": [ 0, 0.91269841269841, 2.8571428571429, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 0.63, 2.1031746031746, 0.98901098901099, 2.23, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 0.91269841269841, 2.8571428571429, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2894.1176470588, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.23045267489712, 1.1102230246252e-16, 0.61522633744856, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.8 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 0, 2 ], [ 1, 1 ], [ 3 ] ], | ||||||
|  |   [ [ 0, 2, 3 ], [ 1, 1 ], [  ] ], | ||||||
|  |   [ [ 2, 0, 3 ], [ 1, 1 ], [  ] ], | ||||||
|  |   [ [ 2, 3 ], [ 1 ], [ 0 ] ], | ||||||
|  |   [ [ 3, 2 ], [ 0 ], [ 1 ] ], | ||||||
|  |   [ [ 1, 3, 0 ], [ 2, 2 ], [  ] ], | ||||||
|  |   [ [ 0, 2, 1 ], [ 3, 3 ], [  ] ], | ||||||
|  |   [ [ 2, 0, 3 ], [ 1, 1 ], [  ] ], | ||||||
|  |   [ [ 3, 1 ], [ 2, 0, 0 ], [  ] ], | ||||||
|  |   [ [ 2, 3, 1 ], [ 0, 0 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0, 0.31, 0.55 ], | ||||||
|  | "order_size": [ 10, 10 ], | ||||||
|  | "passages_size": [ 0, 2 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,72 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  r8.[ ais'16^\markup { \pad-markup #0.2 "-35"}] ~ ais'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'8.[ c''16^\markup { \pad-markup #0.2 "-31"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 7↑" }}] ~ c''2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c''8[ ais'8^\markup { \pad-markup #0.2 "+41"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 13↑" }}] ~ ais'2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'8.[ r16] r2.  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,72 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  r16[ g'8.^\markup { \pad-markup #0.2 "-2"}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'2. ~ g'16[ gis'8.^\markup { \pad-markup #0.2 "-49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 11↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'2. ~ gis'16[ f'8.^\markup { \pad-markup #0.2 "+47"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↓" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'2. ~ f'16[ f'8.^\markup { \pad-markup #0.2 "+42"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 3↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'4 ~ f'16[ r8.] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,72 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r4 r8[ fis'8^\markup { \pad-markup #0.2 "+45"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 11↓" }}] ~ fis'4 ~ fis'8.[ gis'16^\markup { \pad-markup #0.2 "+10"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 5↓" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'4 ~ gis'16[ fis'8.^\markup { \pad-markup #0.2 "+5"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 13↑" }}] ~ fis'2 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1^\markup { \pad-markup #0.2 "-33"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 7↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'4 ~ f'8[ e'8^\markup { \pad-markup #0.2 "-44"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 13↓" }}] ~ e'2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { e'4 ~ e'16[ d'8.^\markup { \pad-markup #0.2 "+0"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 3↑" }}] ~ d'2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d'2. dis'4^\markup { \pad-markup #0.2 "+16"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 11↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'4 ~ dis'16[ r8.] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  r8[ dis'8^\markup { \pad-markup #0.2 "+16"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 11↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'2 ~ dis'16[ d'8.^\markup { \pad-markup #0.2 "-39"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 5↓" }}] ~ d'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d'8.[ c'16^\markup { \pad-markup #0.2 "+49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 3↑" }}] ~ c'2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'4 ~ c'8.[ r16] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,72 @@ | |||||||
|  | {   | ||||||
|  |  { c1^\markup { \pad-markup #0.2 "-4"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c2. r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  r8.[ d16^\markup { \pad-markup #0.2 "+0"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 3↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d4 ~ d8[ dis8^\markup { \pad-markup #0.2 "+39"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 3↓" }}] ~ dis2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis4 ~ dis8[ f8^\markup { \pad-markup #0.2 "+42"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 1↑" }}] ~ f2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f4 ~ f8[ e8^\markup { \pad-markup #0.2 "-8"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 11↑" }}] ~ e2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { e4 ~ e16[ dis8.^\markup { \pad-markup #0.2 "-19"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↓" }}] ~ dis2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis1 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | //(model and global vars mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
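|  | // candidate voice-leading moves: change at most one non-octave prime dimension by ±1, plus octave-transposed copies of each | ||||||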
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
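|  | // seeded duration generator: envelope-shaped random durations quantized to 1/8; a result of 0 folds the event into a chord, pad lengthens the duration by a random amount | ||||||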
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
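|  | // step one voice: score each candidate move by interval size, recency, register, spacing, and harmonic distance, then make a weighted random choice | ||||||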
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
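|  | // realize one order: silent voices rest, sustained voices hold, progressing voices step via updateVoices; on the last order each remaining voice exits to a rest | ||||||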
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = 4 + durAdd}); // rounded down: pad up to the next full measure | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
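|  | // render a sequence as Pbinds: a string_model/sine pair per voice, plus (unless oneShot) an \osc pattern reporting the playing section | ||||||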
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
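|  | // wrap a function in a Routine with its own random seed so repeated calls draw from a reproducible random stream | ||||||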
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
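|  | // pretty-print nested arrays as JSON-ish text, recursing only to maxDepth before falling back to asCompileString | ||||||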
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	// debug printout: flag adjacent states where more than one voice moved, or nothing changed | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
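|  | // coerce values coming back from JSON/OSC: array-like input is re-interpreted as SC literals, numeric strings become numbers | ||||||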
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
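|  | // saveLedger re-derives resourceDir from the target path; if it changed, each piece folder | ||||||
|  | // listed in the ledger is copied across before the ledger JSON itself is written. | ||||||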
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
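|  | // each [p, q] pair is one dimension of the harmonic space: octave (2/1), fifth (3/2), major | ||||||
|  | // third (5/4), harmonic seventh (7/4), and the 11/8 and 13/8 partials; a pitch is an array of | ||||||
|  | // exponents over these ratios (see hsArrayToCents / hsArrayToFreq). | ||||||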
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
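|  | // rough flow of \generate: load the model at msg[1] and restore the globals from it, make sure | ||||||
|  | // the ledger ends in a "tmp" entry, seed lastXChanges from initVoices or from the ref piece's | ||||||
|  | // last_changes, rebuild the seeded duration/step functions (and orders, if unset), run genMotif, | ||||||
|  | // write the model back to the same path and reply with "/generated". | ||||||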
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
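|  | // rough flow of \commit: compare the incoming music_data with the current seq (flagging | ||||||
|  | // motif_edited if they differ), mint a new UID, archive this .scd file and the model JSON under | ||||||
|  | // resourceDir/<uid>/, back up the ledger, then add/insert/replace the UID in it (dropping any | ||||||
|  | // "tmp" entry), save the ledger and reply with "/committed". | ||||||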
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
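|  | // \transport: msg[1] == 0 stops playback (release, gate off, stop the player); msg[1] == 1 cues | ||||||
|  | // the ledger from index msg[2] onward (plus the cued "tmp" model, if any) through genPatterns; | ||||||
|  | // any other value auditions the model JSON passed in msg[2] as a one-shot. | ||||||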
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,67 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 4.125 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 2, -2, 0, -1, 1, 1 ] ], 1.375 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 0, -1, 1, 1 ] ], 1.25 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -1, 0, -1, 1, 0 ] ], 1.125 ], | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ], [ 2, -1, 0, -1, 1, 0 ] ], 3.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ 1, -3, 1, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ 1, -3, 1, -1, 1, 0 ], [ 1, -2, 1, -1, 1, 1 ], [ 2, -2, 1, -1, 1, 0 ], [ "Rest" ] ], 2.25 ], | ||||||
|  |     [ [ [ 1, -3, 1, -1, 1, 0 ], [ 1, -1, 1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ 0, -2, 2, -1, 1, 0 ], [ 1, -1, 1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ], [ "Rest" ] ], 2.125 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 1, -1, 1, -1, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ], [ "Rest" ] ], 4.25 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 1, -1, 1, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ "Rest" ] ], 2 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 1, -2, 1, -1 ], [ 3, -2, 0, -2, 1, 0 ], [ "Rest" ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 1, -2, 1, -1 ], [ 3, -2, 0, -2, 1, 0 ], [ 3, -2, 1, -2, 1, 0 ] ], 1 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 1, -2, 1, -1 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.125 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 2, -2, 2, -2, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -3, 1, -2, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 2 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -3, 1, -2, 1, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], 3.875 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -3, 1, -2, 1, 0 ], [ "Rest" ], [ 2, -2, 1, -1, 1, 0 ] ], 1 ], | ||||||
|  |     [ [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -3, 1, -2, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 0.875 ], | ||||||
|  |     [ [ [ "Rest" ], [ 3, -3, 1, -2, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 7.875 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 1, -2, 1, -1 ], [ 3, -2, 0, -2, 1, 0 ], [ 3, -2, 1, -2, 1, 0 ] ], | ||||||
|  |   [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -2, 1, -2, 1, -1 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -2, 1, -2, 1, 0 ], [ 2, -2, 2, -2, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -3, 1, -2, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -2, 1, -2, 1, 0 ], [ 3, -3, 1, -2, 1, 0 ], [ 2, -1, 1, -2, 1, 0 ], [ 2, -2, 1, -1, 1, 0 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "6ed95c4c", | ||||||
|  | "ref_uid": "4b7745df", | ||||||
|  | "order_seed": 602538, | ||||||
|  | "dur_seed": 495773, | ||||||
|  | "motifs_seed": 128841, | ||||||
|  | "entrances_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 1.55, 3.452380952381, 0.98901098901099, 2.23, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2894.1176470588, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.8 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 1 ], [ 3, 2, 3, 2 ], [ 0 ] ], | ||||||
|  |   [ [ 2 ], [ 0, 1, 1, 0, 0 ], [ 3 ] ], | ||||||
|  |   [ [ 0 ], [ 2, 1, 3, 3, 1, 1, 2 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.66, 0, 0 ], | ||||||
|  | "order_size": [ 1, 4 ], | ||||||
|  | "passages_size": [ 2, 5 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,52 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r16[ d''8.^\markup { \pad-markup #0.2 "+19"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 13↑" }}] ~ d''2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d''4 ~ d''8[ cis''8^\markup { \pad-markup #0.2 "-19"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 3↑" }}] ~ cis''2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis''1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis''2 ~ cis''8.[ r16] r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r8.[ c''16^\markup { \pad-markup #0.2 "-4"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 1↑" }}] ~ c''4 ~ c''8.[ ais'16^\markup { \pad-markup #0.2 "-35"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 7↑" }}] ~ ais'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2 ~ ais'16[ r8.] r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,52 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  gis'4^\markup { \pad-markup #0.2 "+10"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'2. ~ gis'8.[ ais'16^\markup { \pad-markup #0.2 "-35"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 5↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'4 ~ ais'16[ gis'8.^\markup { \pad-markup #0.2 "+10"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 5↓" }}] ~ gis'2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'8[ g'8^\markup { \pad-markup #0.2 "-2"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↑" }}] ~ g'2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { g'16[ r8.] r2.  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,52 @@ | |||||||
|  | {   | ||||||
|  |  { fis'1^\markup { \pad-markup #0.2 "-21"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'4 fis'2.^\markup { \pad-markup #0.2 "+5"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 13↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'4 ~ fis'8[ f'8^\markup { \pad-markup #0.2 "-33"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 3↑" }}] ~ f'2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'4 ~ f'16[ e'8.^\markup { \pad-markup #0.2 "-44"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 13↓" }}] ~ e'2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { e'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { e'4 e'2.^\markup { \pad-markup #0.2 "-18"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 5↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { e'8[ f'8^\markup { \pad-markup #0.2 "-6"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↓" }}] ~ f'2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'2. ~ f'16[ r8.] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,52 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r4 r8.[ dis16^\markup { \pad-markup #0.2 "-37"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 3↓" }}] ~ dis2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis8[ d8^\markup { \pad-markup #0.2 "-49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 5↑" }}] ~ d2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { d8.[ c16^\markup { \pad-markup #0.2 "-4"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "II"\normal-size-super " 7↓" }}] ~ c2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c1 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // model and global vars (mostly set by OSC funcs) | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
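|  | // e.g. hsArrayToCents.value([0, 1, 0, 0, 0, 0]) -> 1200 * log2(3/2), about 702 cents (a just | ||||||
|  | // fifth); exponents across several primes simply add their cent values. | ||||||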
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
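|  | // e.g. hsArrayToFreq.value([1, 0, 0, 0, 0, 0]) -> 2.0 (an octave) and | ||||||
|  | // hsArrayToFreq.value([0, 1, 0, 0, 0, 0]) -> 1.5; genPatterns multiplies this ratio by 60.midicps. | ||||||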
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
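|  | // tuples is the set of candidate moves in harmonic space: every vector that is zero in the | ||||||
|  | // octave dimension and changes at most one other prime by +/-1, plus the same set with the | ||||||
|  | // octave component set to each value from -3 to 3. | ||||||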
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
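|  | // genDurFunc returns a seeded duration generator: envData becomes a random table over | ||||||
|  | // [minDur, maxDur], results are quantised to 1/8 beat, a duration of 0 (a chord) is drawn with | ||||||
|  | // probability chordProb when allowChord is true, and pad adds rrand(minPad, maxPad). | ||||||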
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
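|  | // genStepFunc returns the melodic-step scorer used by intervalScore: envData is rescaled onto | ||||||
|  | // [minStep, maxStep] cents and read as an envelope, with results clipped to [0.001, 1]. | ||||||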
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
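|  | // each generated order is a triple [sustaining voices, progression order, silent voices]: | ||||||
|  | // 1-3 voices sustain (weighted by susWeights), the rest are split between a shuffled | ||||||
|  | // progression (padded with repeats) and silence; one triple is produced per sub-motif. | ||||||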
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
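|  | // updateVoices moves a single voice: candidates are the sustained voices offset by each tuple | ||||||
|  | // (minus pitches already sounding), scored on step size, recency, range, spacing and harmonic | ||||||
|  | // distance (inverted unless hd_invert is set), weighted by passagesWeights, multiplied and fed | ||||||
|  | // to wchoose; the new chord is appended to lastXChanges, which keeps the last 5 states. | ||||||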
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
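|  | // genSubMotif renders one order: voices are visited silent -> sustained -> progressing, each | ||||||
|  | // change takes its duration from the entrances or passages duration function (0 = struck as a | ||||||
|  | // chord), and on the final order every sounding voice is faded out to a Rest via exitsDurFunc. | ||||||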
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd = fDur.round(4) - fDur; | ||||||
|  | 	// if rounding went down, pad up to the next barline rather than overshooting it | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
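|  | // custom \osc event type: lets a Pbind send an OSC message (~path with ~msg) to ~addr in time | ||||||
|  | // with the audio patterns; genPatterns uses it to emit "/playing" once per section. | ||||||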
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
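|  | // genPatterns clumps each voice into held pitches and builds two Pbinds per voice (layering the | ||||||
|  | // \string_model and \sine SynthDefs, defined elsewhere), with attacks proportional to each | ||||||
|  | // duration and longer releases before Rests; unless oneShot, an extra \osc Pbind reports the | ||||||
|  | // section being played back over "/playing". | ||||||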
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
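|  | // seedFunc wraps func in a Routine with its own randSeed, so the returned function yields a | ||||||
|  | // reproducible stream of results for a given seed; e.g. (a sketch, not from the piece): | ||||||
|  | // f = seedFunc.value({ 10000.rand }, 12345); f.value; f.value; // same sequence each fresh run | ||||||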
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	// debug printout: successive states should differ in exactly one voice | ||||||
|  | 	// ("problem 1" = more than one voice changed, "problem 2" = no change) | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
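|  | // /generate: load the model at msg[1], seed the voices from ref_uid's last_changes (or the origin), rebuild the seeded dur/step/order funcs, generate a new seq, write the model back, and reply with /generated | ||||||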
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
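|  | // /commit: mint a new uid, snapshot this code and the (possibly edited) model into the resource dir, back up and rewrite the ledger per the add/insert/replace commit type, then reply with /committed | ||||||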
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
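|  | // /transport: msg[1] == 0 stops playback; 1 cues the ledger from index msg[2] (plus any uncommitted tmp model); otherwise audition the one-shot model sent in msg[2] | ||||||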
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
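|  | // /transcribe_motif: transcribe the motif in msg[1] against the last chord of its ref_uid (or an all-zero reference) into the lilypond includes dir | ||||||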
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
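|  | // /transcribe_all: transcribe a range of ledger entries into each piece's lilypond dir and collect \include lines into the four part ledger files | ||||||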
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,53 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 1 ] ], 1.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, -1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, -1, 1, -2, 1, 1 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 1, 1 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 2, -1 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 2, -1 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ "Rest" ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ -1, -1, 1, -2, 2, 1 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 0.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 2.375 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 1, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 2, -1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -1, 1, -2, 2, -1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ -1, -1, 1, -2, 2, 1 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "784130cc", | ||||||
|  | "ref_uid": "6d635e88", | ||||||
|  | "order_seed": 516056, | ||||||
|  | "dur_seed": 358555, | ||||||
|  | "motifs_seed": 830376, | ||||||
|  | "entrances_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0.29, 0, 1.1111111111111, 0.65934065934066, 1.37, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2411.1455108359, -850.77399380805 ], [ -1872, 450 ], [ -479, 1304.0247678019 ], [ -368, 1173.9938080495 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.33950617283951, 0, 0.72839506172839, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.35, 0.42, 0.75, 0.9, 0.93 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 3 ], [ 2 ], [ 0, 1 ] ], | ||||||
|  |   [ [ 1 ], [ 2, 0, 2, 0 ], [ 3 ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.41, 0, 0 ], | ||||||
|  | "order_size": [ 2, 6 ], | ||||||
|  | "passages_size": [ 0, 5 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { ais'1^\markup { \pad-markup #0.2 "+41"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2 ~ ais'8[ r8] r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  r8[ dis'8^\markup { \pad-markup #0.2 "+39"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 3↓" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { dis'4 ~ dis'8[ e'8^\markup { \pad-markup #0.2 "+9"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 13↓" }}] ~ e'4 gis'4^\markup { \pad-markup #0.2 "-49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'2. ~ gis'8.[ r16] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  r8[ c'8^\markup { \pad-markup #0.2 "+49"}] ~ c'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'2 r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r4 r8[ c8^\markup { \pad-markup #0.2 "+49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 1↑" }}] ~ c2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c8[ a,8^\markup { \pad-markup #0.2 "-10"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 13↑" }}] ~ a,2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a,2. ~ a,16[ r8.] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,943 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // model and global vars (mostly set by OSC funcs) | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
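|  | // candidate harmonic-space moves: at most one ±1 step along a non-octave prime, plus copies of each transposed by ±1..3 octaves | ||||||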
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
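|  | // returns a seeded duration generator: an envelope-shaped random table scaled to minDur..maxDur and quantized to eighths, with a chordProb chance of a zero duration when chords are allowed, plus optional minPad..maxPad padding | ||||||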
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
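|  | // returns a seeded weighting function: melodic distance in cents is mapped through the envelope (x normalized to minStep..maxStep) and clipped to 0.001..1 | ||||||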
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
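|  | // build the per-sub-motif instrument orders: choose how many instruments sustain, progress, and stay silent, then scramble who does what and how often the progressing ones recur | ||||||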
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
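|  | // move instrument ins to a new harmonic-space point reachable from the sustained voices, weighting candidates by step size, novelty, range, spacing, and harmonic distance; the new chord is pushed onto lastXChanges (last 5 kept) | ||||||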
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
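|  | // realize one order as a list of [chord, dur] events: silent, sustaining, then progressing instruments enter in turn (progressing ones move via updateVoices), rhythm comes from the entrances/passages dur funcs, and the last order is emptied out with exit durations | ||||||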
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
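|  | // top-level generation: one sub-motif per order, sanity-checked, with the final duration extended so the total lands on a whole 4-beat measure | ||||||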
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd =  fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); // rounding went down, so pad up to the next measure | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
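|  | // regroup a seq into clumps of 1-3 motifs, appending a short rest to each clump and repeating it (repeat count is currently fixed at 1) | ||||||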
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
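|  | // custom \osc event type: each event sends ~msg to ~addr at ~path (used by genPatterns to report playback position) | ||||||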
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
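|  | // turn the nested [chord, dur] data into Pbinds (a string_model and a sine synth per voice, freqs relative to 60.midicps); unless oneShot, an extra \osc Pbind reports section positions to the client on /playing | ||||||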
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
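|  | // wrap func in a Routine with a fixed randSeed so its random output is reproducible for a given seed | ||||||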
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
|  | 
 | ||||||
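|  | // pretty-print nested arrays as JSON-style text, expanding maxDepth levels before falling back to asCompileString | ||||||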
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print functions = very helpful | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
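|  | // turn parsed-JSON / OSC string values back into SC objects: arrays are cleaned and re-interpreted (with Rest re-quoted), purely numeric strings become Integers or Floats | ||||||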
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
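|  | // write every model namespace to the JSON file at path (music_data, last_changes and order get extra depth) and return the string | ||||||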
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	File.mkdir(resourceDir); | ||||||
|  | 	ledger.do({arg id; | ||||||
|  | 		File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,47 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 4.25 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 2, -1, -1, -1, 1, 0 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ] ], 1.5 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ] ], 2.375 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ 1, 0, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ] ], 1.125 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ], [ 3, -2, 0, -1, 1, -1 ] ], 1.5 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 5.0 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ 1, 0, 0, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, -1, 0, -1, 1, -1 ], [ 2, -1, -1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 2, -1, -1, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 1, 0, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "79e0a4a7", | ||||||
|  | "ref_uid": "6fb60ab6", | ||||||
|  | "order_seed": 216475, | ||||||
|  | "dur_seed": 359011, | ||||||
|  | "motifs_seed": 501751, | ||||||
|  | "entrances_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.9230769230769, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.9230769230769, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.9230769230769, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -3600, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.75 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 1, 0 ], [ 3, 2, 3 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0, 0.65, 0 ], | ||||||
|  | "order_size": [ 1, 1 ], | ||||||
|  | "passages_size": [ 1, 6 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1^\markup { \pad-markup #0.2 "-6"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 5↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'2 ~ a'8[ a'8^\markup { \pad-markup #0.2 "+38"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 13↓" }}] ~ a'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'8[ r8] r2.  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  r8[ gis'8^\markup { \pad-markup #0.2 "-18"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↑" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { gis'4 ~ gis'8[ r8] r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { fis'1^\markup { \pad-markup #0.2 "-21"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'2. ~ fis'16[ r8.] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  r8[ cis'8^\markup { \pad-markup #0.2 "-19"}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'2 r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // model and global vars, mostly set by OSC funcs | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
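|  | /* | ||||||
|  | Editor's note (illustrative sketch, not in the original file): each pitch is a vector of | ||||||
|  | exponents over `primes` (defined under the global vars below as [[2,1],[3,2],[5,4],[7,4],[11,8],[13,8]]), | ||||||
|  | so one step along the 3/2 axis works out to: | ||||||
|  | hsArrayToCents.value([0, 1, 0, 0, 0, 0]);             // 1200 * log2(3/2) ~ 702.0 cents | ||||||
|  | hsArrayToFreq.value([0, 1, 0, 0, 0, 0]);              // frequency ratio 3/2 = 1.5 | ||||||
|  | 60.midicps * hsArrayToFreq.value([0, 1, 0, 0, 0, 0]); // ~ 392.4 Hz, the form used in genPatterns | ||||||
|  | */ | ||||||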
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
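|  | /* | ||||||
|  | Editor's note (not in the original file): tuples enumerates the allowed single moves in | ||||||
|  | harmonic space -- every vector over {-1, 0, 1} whose non-octave entries contain at most one | ||||||
|  | step and whose octave entry (tuple[0]) is 0, plus copies of each shifted by 1..3 octaves in | ||||||
|  | either direction. | ||||||
|  | */ | ||||||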
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
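|  | /* | ||||||
|  | Editor's note (illustrative sketch, not in the original file; the argument values are made up): | ||||||
|  | genDurFunc returns a seeded duration generator -- the envelope data becomes a random table | ||||||
|  | over [minDur, maxDur], a result of 0 marks a chord with the next event (see isInChord in | ||||||
|  | genSubMotif), and everything lands on a 0.125 grid: | ||||||
|  | ~durFunc = genDurFunc.value(0.2, 0.5, 1.0, 0.25, 2.0, [0.5, 1], 12345); | ||||||
|  | ~durFunc.value(true, false); // allowChord = true, pad = false -> 0 or a quantised duration | ||||||
|  | */ | ||||||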
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
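|  | /* | ||||||
|  | Editor's note (not in the original file): each generated order is a triple of voice-index | ||||||
|  | lists [sustained, progressing, silent]; genSubMotif destructures it the same way | ||||||
|  | ("# sus, prog, silent = order"). The "order" field of the saved model JSON, e.g. | ||||||
|  | [ [ 1, 0 ], [ 3, 2, 3 ], [ ] ], is this structure with an empty silent group. | ||||||
|  | */ | ||||||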
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
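|  | /* | ||||||
|  | Editor's note (not in the original file): updateVoices moves a single voice. Candidates are | ||||||
|  | the sustained voices transposed by every tuple, minus pitches already sounding; each gets | ||||||
|  | [stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore], the score columns are | ||||||
|  | weighted by passagesWeights, the weighted scores are multiplied per candidate, and the | ||||||
|  | winner is drawn with wchoose. | ||||||
|  | */ | ||||||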
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd = fDur.round(4) - fDur; | ||||||
|  | 	// if rounding went down, pad up to the next multiple of 4 instead | ||||||
|  | 	if(durAdd < 0, {durAdd = 4 + durAdd}); | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
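|  | /* | ||||||
|  | Editor's note (not in the original file): genPatterns builds, per voice, a \string_model | ||||||
|  | Pbind plus a doubling \sine Pbind (runs of identical pitch states are merged into single | ||||||
|  | held notes via clumps), and -- unless oneShot -- appends an \osc-type Pbind that reports | ||||||
|  | "/playing" positions back to the client as each section's duration elapses; the whole set | ||||||
|  | is returned as one Ppar. | ||||||
|  | */ | ||||||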
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); // was "m", which is undefined here; mOut is the MIDIOut above | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
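|  | /* | ||||||
|  | Editor's note (illustrative sketch, not in the original file; the seed is made up): seedFunc | ||||||
|  | wraps a function in a Routine with a fixed randSeed, so rebuilding with the same seed | ||||||
|  | replays the same pseudo-random stream: | ||||||
|  | ~roll = seedFunc.value({rrand(0, 100)}, 12345); | ||||||
|  | 3.collect({~roll.value}); // the same three values every time this pair of lines is re-run | ||||||
|  | */ | ||||||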
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
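|  | /* | ||||||
|  | Editor's note (illustrative sketch, not in the original file): stringifyToDepth renders | ||||||
|  | nested arrays as JSON-compatible text, breaking out maxDepth levels onto their own lines and | ||||||
|  | compiling anything deeper inline, roughly: | ||||||
|  | stringifyToDepth.value([[1, 2], [3, 4]], 1); | ||||||
|  | // -> "[\n  [ 1, 2 ],\n  [ 3, 4 ]\n]" | ||||||
|  | */ | ||||||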
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print functions = very helpful | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
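|  | /* | ||||||
|  | Editor's note (summary, not part of the original handler): \generate reloads the model at | ||||||
|  | msg[1] into the globals (keeping the running lastXChanges), makes sure the ledger ends in a | ||||||
|  | "tmp" entry, seeds lastXChanges from the referenced model's "last_changes" (or from | ||||||
|  | initVoices when ref_uid is nil), builds the seeded duration/step functions and orders, runs | ||||||
|  | genMotif, writes the result back to the same path, and replies on "/generated". | ||||||
|  | */ | ||||||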
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,48 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 1.25 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ] ], 4.25 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ "Rest" ], [ 3, -2, 0, -1, 0, 0 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ], [ 3, -2, 0, -1, 0, 0 ] ], 0.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ], [ 2, -1, 0, -1, 1, 0 ] ], 3.875 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ 3, -2, 0, -1, 1, -1 ], [ 2, -1, 0, -1, 1, 0 ] ], 1.75 ], | ||||||
|  |     [ [ [ 1, -1, 0, -1, 1, 0 ], [ "Rest" ], [ "Rest" ], [ 2, -1, 0, -1, 1, 0 ] ], 0.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 2, -1, 0, -1, 1, 0 ] ], 1.25 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 4.375 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 3, -3, 0, -1, 1, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 2, -2, 0, -1, 2, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -2, 1, 0 ], [ 3, -2, 0, -1, 0, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ], [ 3, -2, 0, -1, 0, 0 ] ], | ||||||
|  |   [ [ 1, -1, 0, -1, 1, 0 ], [ 2, -2, 0, -1, 1, 0 ], [ 3, -2, 0, -1, 1, -1 ], [ 2, -1, 0, -1, 1, 0 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "7d3c9a80", | ||||||
|  | "ref_uid": "43b009ff", | ||||||
|  | "order_seed": 216475, | ||||||
|  | "dur_seed": 155918, | ||||||
|  | "motifs_seed": 903149, | ||||||
|  | "entrances_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0, 1.1904761904762, 3.3333333333333, 0.71, 1.92, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -3600, -312 ], [ -1872, 1378 ], [ -145, 1583 ], [ -182, 1527 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.14197530864198, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.75, 0.75, 0.75, 0.75, 0.75 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 1, 0 ], [ 3, 2, 3 ], [  ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0, 0.65, 0 ], | ||||||
|  | "order_size": [ 1, 1 ], | ||||||
|  | "passages_size": [ 1, 6 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2.  c''4^\markup { \pad-markup #0.2 "+27"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 11↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c''1 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis''1^\markup { \pad-markup #0.2 "-19"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 1↑" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis''1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis''1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis''2. ~ cis''16[ r8.] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  r8[ a'8^\markup { \pad-markup #0.2 "+38"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 13↓" }}] ~ a'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { a'2. ~ a'16[ r8.] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { fis'1^\markup { \pad-markup #0.2 "-21"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { fis'2. ~ fis'8.[ r16] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,24 @@ | |||||||
|  | {   | ||||||
|  |  { r2  r8[ cis'8^\markup { \pad-markup #0.2 "-19"}] ~ cis'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { cis'8.[ r16] r2.  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,945 @@ | |||||||
|  | ( | ||||||
|  | // helper funcs | ||||||
|  | var hsArrayToCents, pDist, hdSum, hsChordalDistance, hsArrayToFreq; | ||||||
|  | 
 | ||||||
|  | // score funcs | ||||||
|  | var isInRange, spacingScore, rangeScore, intervalScore, inclusionScore; | ||||||
|  | 
 | ||||||
|  | // subroutines | ||||||
|  | var genTuples, initVoices, genOrders, genSubMotif, updateVoices, genDurFunc, genStepFunc; | ||||||
|  | 
 | ||||||
|  | // primary routines | ||||||
|  | var genMotif, genSecondarySeq; | ||||||
|  | 
 | ||||||
|  | // audition funcs | ||||||
|  | var genPatterns, genMidiPatterns; | ||||||
|  | 
 | ||||||
|  | // resource management funcs | ||||||
|  | var seedFunc, genUID, writeResources, stringifyToDepth, setSeeds, sanityCheck, | ||||||
|  | msgInterpret, loadLedgerFile, loadLedgerJSON, loadModelFile, loadModelJSON, | ||||||
|  | setGlobalVars, globalVarsToDict, saveLedger; | ||||||
|  | 
 | ||||||
|  | // model vars | ||||||
|  | // model and global vars (mostly set by OSC funcs) | ||||||
|  | var seq, lastXChanges, | ||||||
|  | curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | orders, susWeights, orderSize, passagesSize, | ||||||
|  | motifEdited, orderEdited; | ||||||
|  | 
 | ||||||
|  | // model aux vars | ||||||
|  | var entrancesDurFunc, passagesDurFunc, exitsDurFunc, stepFunc; | ||||||
|  | 
 | ||||||
|  | // other global vars | ||||||
|  | var popSize, exPath, dir, primes, dims, tuples, | ||||||
|  | group, player, resourceDir, ledgerPath, ledger, currentlyPlayingUID, | ||||||
|  | nameSpaces; | ||||||
|  | 
 | ||||||
|  | // install JSON quark (not used) | ||||||
|  | /* | ||||||
|  | if(Quarks.isInstalled("JSONlib").not, { | ||||||
|  | 	Quarks.install("https://github.com/musikinformatik/JSONlib.git"); | ||||||
|  | 	thisProcess.recompile; | ||||||
|  | 	//HelpBrowser.openHelpFor("Classes/JSONlib"); | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------helper funcs | ||||||
|  | 
 | ||||||
|  | hsArrayToCents = { | ||||||
|  | 	arg hsArray; | ||||||
|  | 	hsArray.collect({arg dist, p; dist * 1200 * log2(primes[p][0]/primes[p][1])}).sum | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | pDist = { | ||||||
|  | 	arg array1, array2, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = hsArrayToCents.value(array1) - hsArrayToCents.value(array2); | ||||||
|  | 	if(signed, {pDistance}, {abs(pDistance)}) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hdSum = { | ||||||
|  | 	arg hsArrays; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays.size; | ||||||
|  | 	distances = (size - 1).collect({arg i; | ||||||
|  | 		((i + 1)..(size - 1)).collect({arg j; | ||||||
|  | 			abs(hsArrays[i] - hsArrays[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsChordalDistance = { | ||||||
|  | 	arg hsArrays1, hsArrays2; | ||||||
|  | 	var size, distances, mean; | ||||||
|  | 	size = hsArrays1.size; | ||||||
|  | 	distances = hsArrays1.size.collect({arg i; | ||||||
|  | 		hsArrays2.size.collect({arg j; | ||||||
|  | 			abs(hsArrays1[i] - hsArrays2[j]).collect({arg dist, p; dist * log2(primes[p].product)}).sum | ||||||
|  | 		}); | ||||||
|  | 	}).flat; | ||||||
|  | 	mean = distances.sum / distances.size; | ||||||
|  | 	distances.sum | ||||||
|  | 	//mean + ((1 / sqrt((pow(distances - mean, 2)).sum / distances.size)) * mean) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | hsArrayToFreq = { | ||||||
|  | 	arg array; | ||||||
|  | 	array.collect({arg dim, d; pow(primes[d][0]/primes[d][1], dim)}).product | ||||||
|  | }; | ||||||
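|  | // illustrative check (not part of the original code): with the primes list defined under "global vars" | ||||||
|  | // below, the harmonic-space array [ -1, 1, 0, 0, 0, 0 ] denotes (2/1)^-1 * (3/2)^1, so | ||||||
|  | // hsArrayToFreq.value([ -1, 1, 0, 0, 0, 0 ]) -> 0.75 (a 3/4 ratio relative to the reference) and | ||||||
|  | // hsArrayToCents.value([ -1, 1, 0, 0, 0, 0 ]) -> ~ -498.04 (a just perfect fourth below it). | ||||||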
|  | 
 | ||||||
|  | //------score funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | isInRange = { | ||||||
|  | 	arg hsArray, min, max; | ||||||
|  | 	var cents; | ||||||
|  | 	cents = hsArrayToCents.value(hsArray); | ||||||
|  | 	(cents >= min) && (cents <= max) | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | spacingScore = { | ||||||
|  | 	arg hsArrays, min; | ||||||
|  | 	var centsArray; | ||||||
|  | 	centsArray = hsArrays.collect({arg hsArray; hsArrayToCents.value(hsArray)}).sort({arg a, b; a < b}); | ||||||
|  | 	centsArray.differentiate.drop(1).collect({arg pDistance; if(pDistance >= min, {1}, {0.01})}).sum; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | rangeScore = { | ||||||
|  | 	arg hsArray1, hsArray2, min, max, low, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	if((pDistance >= min) && (pDistance <= max), {1}, {low}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | intervalScore = { | ||||||
|  | 	arg hsArray1, hsArray2, mean, sd, signed = false; | ||||||
|  | 	var pDistance; | ||||||
|  | 	pDistance = pDist.value(hsArray1, hsArray2, signed); | ||||||
|  | 	//pDistance.gaussCurve(1, mean, sd) | ||||||
|  | 	stepFunc.value(pDistance); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | inclusionScore = { | ||||||
|  | 	arg array, test, min = 0.01; | ||||||
|  | 	if(array.collect({arg v; v.hash}).includes(test.hash), {min}, {1}); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------subroutines | ||||||
|  | 
 | ||||||
|  | genTuples = { | ||||||
|  | 	var tuples; | ||||||
|  | 	tuples = dims.collect({[-1, 0, 1]}).allTuples.select({arg tuple; (abs(tuple.drop(1)).sum <= 1) && (tuple[0] == 0)}); | ||||||
|  | 	tuples = tuples ++ tuples.collect({arg tuple; [-3, -2, -1, 1, 2, 3].collect({arg octTrans; tuple.deepCopy.put(0, octTrans)})}).flatten; | ||||||
|  | }; | ||||||
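|  | // shape note (illustrative): each tuple is a single step vector in harmonic space, e.g. | ||||||
|  | // [ 0, 1, 0, 0, 0, 0 ] (up a 3/2) or [ 0, 0, -1, 0, 0, 0 ] (down a 5/4); the second pass adds the | ||||||
|  | // same moves combined with octave shifts of ±1, ±2, ±3 in the first (2/1) dimension. | ||||||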
|  | 
 | ||||||
|  | initVoices = { | ||||||
|  | 	var init, voicesInit; | ||||||
|  | 	voicesInit = popSize.collect({dims.collect({0})}); | ||||||
|  | 	/* | ||||||
|  | 	voicesInit = [dims.collect({0})]; | ||||||
|  | 	(popSize - 1).do({ | ||||||
|  | 		arg rep, new; | ||||||
|  | 		rep = dims.rand; | ||||||
|  | 		new = voicesInit.last.deepCopy; | ||||||
|  | 		new[rep] = new[rep] + [-1, 1].choose(); | ||||||
|  | 		voicesInit = voicesInit.add(new); | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 	voicesInit.deepCopy; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genDurFunc = {arg chordProb, minPad, maxPad, minDur, maxDur, envData, seed; | ||||||
|  | 	var env, pTable, durFunc; | ||||||
|  | 	env = Env.pairs([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).asSignal(256).asList.asArray; | ||||||
|  | 	pTable = env.asRandomTable; | ||||||
|  | 	[chordProb, minPad, maxPad, minDur, maxDur, envData].postln; | ||||||
|  | 	durFunc = {arg allowChord, pad = false; | ||||||
|  | 		var res; | ||||||
|  | 		res = if(allowChord.not, { | ||||||
|  | 			pTable.tableRand * (maxDur - minDur) + minDur | ||||||
|  | 		}, { | ||||||
|  | 			if(1.0.rand < chordProb, {0}, {pTable.tableRand * (maxDur - minDur) + minDur}); | ||||||
|  | 		}).round(0.125); | ||||||
|  | 		if(pad, {res = res + rrand(minPad.asFloat, maxPad.asFloat).round(0.125)}); | ||||||
|  | 		if(res.asInteger == res, {res = res.asInteger}); | ||||||
|  | 		res | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(durFunc, seed); | ||||||
|  | }; | ||||||
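|  | // usage sketch with made-up values (the real calls come from the OSC \generate handler below): | ||||||
|  | // d = genDurFunc.value(0.25, 0.5, 1, 0.5, 2, [ 0.5, 1 ], 12345); | ||||||
|  | // d.value(false)      -> a duration in [0.5, 2] drawn from the envelope, rounded to 0.125 | ||||||
|  | // d.value(true)       -> the same, except it returns 0 (i.e. a chord) with probability 0.25 | ||||||
|  | // d.value(true, true) -> as above, plus a random pad of 0.5..1 beats | ||||||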
|  | 
 | ||||||
|  | genStepFunc = {arg minStep, maxStep, envData, seed; | ||||||
|  | 	var envDataNorm, env, pTable, stepFunc; | ||||||
|  | 	[minStep, maxStep, envData].postln; | ||||||
|  | 	envDataNorm = ([[0, 0]] ++ envData.clump(2) ++ [[1, 0]]).flop; | ||||||
|  | 	envDataNorm = [envDataNorm[0].normalize(minStep, maxStep), envDataNorm[1]].flop; | ||||||
|  | 	env = Env.pairs(envDataNorm); | ||||||
|  | 	stepFunc = {arg pDist; | ||||||
|  | 		env.at(pDist).clip(0.001, 1); | ||||||
|  | 	}; | ||||||
|  | 	seedFunc.value(stepFunc, seed); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genOrders = {arg minMotifLength = 1, maxMotifLength = 5, minProgLength = 0, maxProgLength = 5; | ||||||
|  | 	((maxMotifLength - minMotifLength).rand + minMotifLength).collect({ | ||||||
|  | 		var noProgIns, noSusIns, noSilentIns, prog, sus, silent, order; | ||||||
|  | 		noSusIns = [1, 2, 3].wchoose(susWeights.normalizeSum); | ||||||
|  | 		noProgIns = (popSize - noSusIns).rand + 1; | ||||||
|  | 		noSilentIns = popSize - noSusIns - noProgIns; | ||||||
|  | 
 | ||||||
|  | 		# prog, sus, silent = (0..(popSize-1)).scramble.clumps([noProgIns, noSusIns, noSilentIns]); | ||||||
|  | 
 | ||||||
|  | 		prog  = (prog.scramble ++ ((maxProgLength - minProgLength).rand + minProgLength).collect({prog.choose}).scramble); | ||||||
|  | 		if(silent == nil, {silent = []}); | ||||||
|  | 		[sus.scramble, prog, silent.scramble] | ||||||
|  | 	}); | ||||||
|  | }; | ||||||
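|  | // shape note (illustrative): each generated order is [ susVoices, progVoices, silentVoices ], | ||||||
|  | // e.g. [ [ 3 ], [ 2 ], [ 0, 1 ] ] sustains voice 3, progresses voice 2, and silences voices 0 and 1; | ||||||
|  | // genOrders returns one such order per sub-motif. | ||||||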
|  | 
 | ||||||
|  | updateVoices = {arg ins, sus; | ||||||
|  | 	var voices, candidates, nWeights, nProbs, sel; | ||||||
|  | 
 | ||||||
|  | 	voices = lastXChanges.deepCopy.last; | ||||||
|  | 
 | ||||||
|  | 	candidates = sus.collect({arg v; tuples.collect({arg t; voices[v] + t})}).flatten; | ||||||
|  | 	candidates = difference(candidates.asSet, voices.asSet).asList; | ||||||
|  | 	nProbs = candidates.collect({arg candidate; | ||||||
|  | 		var stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore; | ||||||
|  | 
 | ||||||
|  | 		//stepScore = intervalScore.value(voices[ins], candidate, 30, 400, 0.1); | ||||||
|  | 		stepScore = intervalScore.value(voices[ins], candidate, 100, 100); | ||||||
|  | 		recentlySoundedScore = inclusionScore.value(lastXChanges.flop[ins], candidate, 0); | ||||||
|  | 		isInRangeScore = rangeScore.value(candidate, candidate.collect({0}), ranges[ins][0], ranges[ins][1], 0, true); | ||||||
|  | 		regScore = spacingScore.value(voices.deepCopy.put(ins, candidate), 300); | ||||||
|  | 		hdScore = pow(hdSum.value(voices.deepCopy.put(ins, candidate)), hdExp); | ||||||
|  | 		if(hdInvert == 0, {hdScore = 1/hdScore}); | ||||||
|  | 		//maybe what you want here is a vector to another root and then favoring movement towards it. | ||||||
|  | 		//distScore = pow(hsChordalDistance.value(voices, voices.put(ins, candidate)), 2); | ||||||
|  | 
 | ||||||
|  | 		[stepScore, recentlySoundedScore, isInRangeScore, regScore, hdScore] | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	nWeights = passagesWeights; | ||||||
|  | 
 | ||||||
|  | 	//this handles nWeights of 0; mainly for testing | ||||||
|  | 	nProbs = nProbs.flop.select({arg scores, s; nWeights[s] != 0}).flop; | ||||||
|  | 	nWeights = nWeights.select({arg weight; weight != 0}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; | ||||||
|  | 		if(scores.sum == 0, {scores}, {scores.normalizeSum * nWeights[s]}) | ||||||
|  | 	}); | ||||||
|  | 	nProbs = nProbs.flop.collect({arg scores, s; scores.product}).normalizeSum; | ||||||
|  | 
 | ||||||
|  | 	sel = candidates.wchoose(nProbs); | ||||||
|  | 
 | ||||||
|  | 	voices[ins] = sel; | ||||||
|  | 	lastXChanges = lastXChanges.add(voices).keep(-5); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSubMotif = {arg order, orderIndex, lastState, repeatLast = false, startFromLast = false, isLastOrder = false; | ||||||
|  | 	var sus, prog, silent, flatOrder, res, isInChord, allowChord, pad, lastXChangesHold, voices, adder; | ||||||
|  | 	# sus, prog, silent = order; | ||||||
|  | 	flatOrder = silent ++ sus ++ prog; | ||||||
|  | 	lastXChangesHold = lastXChanges.deepCopy; | ||||||
|  | 	voices = lastState.deepCopy; | ||||||
|  | 	isInChord = popSize.collect({false}); | ||||||
|  | 	allowChord = false; | ||||||
|  | 	pad = false; | ||||||
|  | 	res = []; | ||||||
|  | 	"------generating motif".postln; | ||||||
|  | 	//need to figure out here if voices move between motifs | ||||||
|  | 	flatOrder.do({arg ins, i; | ||||||
|  | 
 | ||||||
|  | 		if(prog.includes(ins) && repeatLast.not, {updateVoices.value(ins, sus)}); | ||||||
|  | 		adder = if(silent.includes(ins), {["Rest"]}, {lastXChanges.last.deepCopy[ins]}); | ||||||
|  | 
 | ||||||
|  | 		if(voices[ins] != adder, { | ||||||
|  | 			var dur; | ||||||
|  | 
 | ||||||
|  | 			if((sus ++ silent).includes(ins), { | ||||||
|  | 				allowChord = (ins != sus.last); | ||||||
|  | 				pad = (ins == sus.last); | ||||||
|  | 			}, { | ||||||
|  | 				if(i < (flatOrder.size - 1), { | ||||||
|  | 					allowChord = (isInChord[flatOrder[i + 1]] || (ins == flatOrder[i + 1])).not; | ||||||
|  | 					pad = false; | ||||||
|  | 				}, { | ||||||
|  | 					allowChord = false; | ||||||
|  | 					pad = true | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if((orderIndex == 0) && sus.includes(ins), { | ||||||
|  | 				dur = entrancesDurFunc.value(allowChord, pad); | ||||||
|  | 			}, { | ||||||
|  | 				dur = passagesDurFunc.value(allowChord, pad); | ||||||
|  | 			}); | ||||||
|  | 			if(dur == 0, {isInChord[ins] = true}, {isInChord = popSize.collect({false})}); | ||||||
|  | 
 | ||||||
|  | 			voices[ins] = adder; | ||||||
|  | 			res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	// pad ending | ||||||
|  | 	if(orderIndex == (orders.size - 1), { | ||||||
|  | 		(0..(popSize-1)).scramble.do({arg ins; | ||||||
|  | 			if(res.last.first[ins] != ["Rest"], { | ||||||
|  | 				var dur; | ||||||
|  | 				voices[ins] = ["Rest"]; | ||||||
|  | 				allowChord = (voices != popSize.collect({["Rest"]})); | ||||||
|  | 				pad = allowChord.not; | ||||||
|  | 				dur = exitsDurFunc.value(allowChord, pad); | ||||||
|  | 				res = res.add([voices.deepCopy.postln, dur]); | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//format and return | ||||||
|  | 	if(startFromLast, {lastXChanges = lastXChangesHold.deepCopy}); | ||||||
|  | 	res; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------primary routines | ||||||
|  | 
 | ||||||
|  | genMotif = { | ||||||
|  | 	var repeats, fSeq, fDur, durAdd; | ||||||
|  | 
 | ||||||
|  | 	repeats = 1; | ||||||
|  | 	fSeq = []; | ||||||
|  | 
 | ||||||
|  | 	repeats.do({arg index; | ||||||
|  | 		var motif; | ||||||
|  | 
 | ||||||
|  | 		motif = []; | ||||||
|  | 
 | ||||||
|  | 		orders.do({arg order, o; | ||||||
|  | 			var lastState, subMotif; | ||||||
|  | 			lastState = if(o == 0, {popSize.collect({["Rest"]})}, {motif.last.last.first}); | ||||||
|  | 			subMotif = genSubMotif.value(order, o, lastState, isLastOrder: o == (orders.size - 1)); | ||||||
|  | 			motif = motif.add(subMotif); | ||||||
|  | 
 | ||||||
|  | 		}); | ||||||
|  | 
 | ||||||
|  | 		sanityCheck.value(motif, index); | ||||||
|  | 
 | ||||||
|  | 		fSeq = fSeq.add(motif); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	//round last duration to measure | ||||||
|  | 	fDur = fSeq.flatten.flatten.slice(nil, 1).sum; | ||||||
|  | 	durAdd = fDur.round(4) - fDur; | ||||||
|  | 	if(durAdd < 0, {durAdd = durAdd + 4}); // pad up to the next whole measure when rounding went down | ||||||
|  | 	fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] = fSeq[0][orders.size - 1][fSeq[0][orders.size - 1].size - 1][1] + durAdd; | ||||||
|  | 
 | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | genSecondarySeq = {arg seq; | ||||||
|  | 	var curdles, fSeq; | ||||||
|  | 	curdles = []; | ||||||
|  | 	while({curdles.sum < seq.size}, {curdles = curdles ++ [3.rand + 1]}); | ||||||
|  | 
 | ||||||
|  | 	fSeq = seq.clumps(curdles).collect({arg clump, m; | ||||||
|  | 		var repeats, paddedSeq; | ||||||
|  | 
 | ||||||
|  | 		//add rest | ||||||
|  | 		paddedSeq = clump.add([[[popSize.collect({["Rest"]}), 0.5.rand]]]); | ||||||
|  | 
 | ||||||
|  | 		//implement repeats | ||||||
|  | 		repeats = [0.rand + 1, 1].wchoose([1, 0].normalizeSum); | ||||||
|  | 		repeats.collect({paddedSeq}); | ||||||
|  | 	}); | ||||||
|  | 	fSeq | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------audition funcs | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.postln.notNil) { | ||||||
|  | 		~addr.sendMsg(~indexPath, ~indexMsg); | ||||||
|  | 		~addr.sendMsg(~seqPath, stringifyToDepth.value(~seqMsg, 3)); | ||||||
|  | 		//~addr.sendMsg("/STATE/OPEN", (dir.replace("supercollider", "resources") +/+ ~idMsg +/+ ~idMsg ++ "_gui_state" ++ ".state").standardizePath.postln); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | Event.addEventType(\osc, { | ||||||
|  | 	if (~addr.notNil) { | ||||||
|  | 		~msg; | ||||||
|  | 		~addr.sendMsg(~path, *~msg); | ||||||
|  | 	}; | ||||||
|  | }); | ||||||
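|  | // usage sketch (illustrative; the address and port are placeholders, genPatterns builds these via Pbind): | ||||||
|  | // (type: \osc, addr: NetAddr("127.0.0.1", 57120), path: "/playing", msg: [ "someUID", 0 ], dur: 4).play; | ||||||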
|  | 
 | ||||||
|  | genPatterns = {arg inSeq, addr, oneShot = false; | ||||||
|  | 	var voices, durs, pbinds, res, indices, sectionDurs, msg, ids, seq; | ||||||
|  | 	seq = inSeq.collect({arg mSeq; mSeq[0]}); | ||||||
|  | 	# voices, durs =  seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	pbinds = voices.flop.collect({arg voice, v; | ||||||
|  | 		var clumps, hdScores, freqs, fDurs, attacks, rels, amps; | ||||||
|  | 		clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 		freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 		fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 		//attacks = 2.collect({rrand(1, 3)}) ++ freqs.drop(2).collect({rrand(0.3, 0.5)}); | ||||||
|  | 		attacks = fDurs.collect({arg dur; dur * rrand(0.2, 0.4)}); | ||||||
|  | 		//rels = freqs.drop(2).collect({rrand(0.3, 0.5)}) ++ 2.collect({rrand(1, 3)}); | ||||||
|  | 		rels = (clumps.size - 1).collect({arg c; | ||||||
|  | 			if(clumps[c + 1][0] == ["Rest"], {rrand(1.0, 3.0)}, {rrand(0.3, 0.5)}); | ||||||
|  | 		}); | ||||||
|  | 		rels = rels.add(rrand(1.0, 3.0)); | ||||||
|  | 		amps = freqs.collect({rrand(0.6, 0.99)}); | ||||||
|  | 
 | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \string_model, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\attack, Pseq(attacks, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\release, Pseq(rels, 1), | ||||||
|  | 				//\amp, Pseq(amps, 1), | ||||||
|  | 				\amp, Pbrown(0.5, 1, 0.5), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			), | ||||||
|  | 			Pbind( | ||||||
|  | 				\instrument, \sine, | ||||||
|  | 				\group, group, | ||||||
|  | 				\freq, Pseq(freqs, 1), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\sustain, Pseq(fDurs, 1), | ||||||
|  | 				\busIndex, v | ||||||
|  | 			) | ||||||
|  | 		] | ||||||
|  | 	}).flatten; | ||||||
|  | 	if(oneShot.not, { | ||||||
|  | 		msg = inSeq.collect({arg mSeq, m; mSeq[1..]}); | ||||||
|  | 		//ids = inSeq.collect({arg mSeq, m; mSeq[2]}); | ||||||
|  | 		sectionDurs = seq.collect({arg mSeq; mSeq.flatten2(mSeq.maxDepth - 5).flop[1].sum}); | ||||||
|  | 		pbinds = pbinds ++ | ||||||
|  | 		[ | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \osc, | ||||||
|  | 				\addr, addr, | ||||||
|  | 				\path, "/playing", | ||||||
|  | 				\msg, Pseq(msg, 1), | ||||||
|  | 				\dur, Pseq(sectionDurs, 1) | ||||||
|  | 			); | ||||||
|  | 		] | ||||||
|  | 	}); | ||||||
|  | 	res = Ppar(pbinds); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | /* | ||||||
|  | genMidiPatterns = {arg seq; | ||||||
|  | 	var voices, durs, patterns, res, mOut, pbRange; | ||||||
|  | 	pbRange = 1; //semitones - change this as needed for your situation | ||||||
|  | 	mOut =  MIDIOut.newByName("TiMidity", "TiMidity port 0").latency_(Server.default.latency); | ||||||
|  | 	# voices, durs = seq.flatten2(seq.maxDepth - 5).flop; | ||||||
|  | 	res = Ppar( | ||||||
|  | 		voices.flop.collect({arg voice, v; | ||||||
|  | 			var clumps, hdScores, freqs, fDurs; | ||||||
|  | 
 | ||||||
|  | 			mOut.program(v, 70); | ||||||
|  | 
 | ||||||
|  | 			clumps = voice.separate({arg a, b; a != b }); | ||||||
|  | 			freqs = clumps.collect({arg clump; if(clump[0] != ["Rest"], {(60.midicps * hsArrayToFreq.value(clump[0]))}, {Rest(0)})}); | ||||||
|  | 			fDurs = durs.clumps(clumps.collect({arg clump; clump.size})).collect({arg clump; clump.sum}); | ||||||
|  | 
 | ||||||
|  | 			Pbind( | ||||||
|  | 				\type, \midi, | ||||||
|  | 				\chan, v, | ||||||
|  | 				\noteval, Pseq(freqs.cpsmidi - 24, 1), | ||||||
|  | 				\note, Pfunc({ | event | event[\noteval].floor }), | ||||||
|  | 				\dur, Pseq(fDurs, 1), | ||||||
|  | 				\midiout, mOut, | ||||||
|  | 				\amp, 1, | ||||||
|  | 				\bend, Pfunc({ | ||||||
|  | 					| event | | ||||||
|  | 					if (event[\note].isRest.not) { | ||||||
|  | 						var pitchbendvalue = event[\noteval].frac.linlin(0, pbRange, 8192, 8192*2).asInteger; | ||||||
|  | 						mOut.bend(v, pitchbendvalue); | ||||||
|  | 					}; | ||||||
|  | 					0; // return something other than nil to avoid stopping the pattern | ||||||
|  | 				}), | ||||||
|  | 			); | ||||||
|  | 		}); | ||||||
|  | 	); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | */ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------resource management funcs | ||||||
|  | 
 | ||||||
|  | genUID = {Date.seed.asHexString.toLower}; | ||||||
|  | 
 | ||||||
|  | seedFunc = {arg func, seed; | ||||||
|  | 	var funcArgs, next; | ||||||
|  | 	next = Routine({loop{func.valueArray(funcArgs).yield }}); | ||||||
|  | 	next.randSeed_(seed); | ||||||
|  | 	{arg ...args; funcArgs = args; next.value} | ||||||
|  | }; | ||||||
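|  | // usage sketch (illustrative): seedFunc wraps a function in a Routine with a fixed random seed, so a | ||||||
|  | // given seed always reproduces the same pseudo-random sequence: | ||||||
|  | // f = seedFunc.value({arg lo, hi; rrand(lo, hi)}, 1234); | ||||||
|  | // f.value(0, 100); f.value(0, 100); // deterministic sequence for seed 1234 | ||||||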
|  | 
 | ||||||
|  | stringifyToDepth = {arg data, maxDepth = 1; | ||||||
|  | 	var prettyString = "", rCount = 0, writeArray, indent; | ||||||
|  | 
 | ||||||
|  | 	if(maxDepth == 0, { | ||||||
|  | 		data.asCompileString | ||||||
|  | 	}, { | ||||||
|  | 		indent = {arg size; size.collect({" "}).join("")}; | ||||||
|  | 		writeArray = {arg array; | ||||||
|  | 			prettyString = prettyString ++ indent.value(rCount) ++ "[\n"; | ||||||
|  | 			rCount = rCount + 1; | ||||||
|  | 			if(rCount < maxDepth, { | ||||||
|  | 				array.do({arg subArray; writeArray.value(subArray)}); | ||||||
|  | 			}, { | ||||||
|  | 				prettyString = prettyString ++ array.collect({arg subArray; | ||||||
|  | 					indent.value(rCount + 1) ++ subArray.asCompileString | ||||||
|  | 				}).join(",\n"); | ||||||
|  | 			}); | ||||||
|  | 			rCount = rCount - 1; | ||||||
|  | 			prettyString = prettyString ++ "\n" ++ indent.value(rCount) ++ "],\n"; | ||||||
|  | 		}; | ||||||
|  | 
 | ||||||
|  | 		writeArray.value(data); | ||||||
|  | 		prettyString.replace(",\n\n", "\n").drop(-2); | ||||||
|  | 	}) | ||||||
|  | }; | ||||||
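|  | // usage sketch (illustrative): stringifyToDepth.value([ [ 1, 2 ], [ 3, 4 ] ], 1) nests brackets down to | ||||||
|  | // depth 1 and writes each inner array with .asCompileString; writeResources below uses depth 3 for | ||||||
|  | // "music_data" and depth 1 for "last_changes" and "order" in the model JSON files. | ||||||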
|  | 
 | ||||||
|  | sanityCheck = {arg motif, index; | ||||||
|  | 	//print statements here are very helpful for debugging | ||||||
|  | 	("----------" + index + "------------").postln; | ||||||
|  | 
 | ||||||
|  | 	motif.flatten.do({arg val, v; | ||||||
|  | 		if(v > 0, { | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) > 1, {"problem 1".postln}); | ||||||
|  | 			if(motif.flatten[v-1][0].hammingDistance(val[0]) == 0, {"problem 2".postln}); | ||||||
|  | 		}); | ||||||
|  | 		val.postln | ||||||
|  | 	}); | ||||||
|  | 	"***********".postln; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | msgInterpret = {arg in, escapeDoubleQuotes = true, escapeSingleQuotes = true; | ||||||
|  | 	var res; | ||||||
|  | 
 | ||||||
|  | 	res = in; | ||||||
|  | 	if(res.isNil.not, { | ||||||
|  | 		if((res.isArray && res.isString.not), { | ||||||
|  | 			res = res.asCompileString; | ||||||
|  | 			res = res.replace(" ", "").replace("\n", "").replace("\t", ""); | ||||||
|  | 			if(escapeSingleQuotes, {res = res.replace("\'", "")}); | ||||||
|  | 			if(escapeDoubleQuotes, {res = res.replace("\"", "")}); | ||||||
|  | 			res = res.replace("Rest", "\"Rest\""); | ||||||
|  | 			res = res.interpret; | ||||||
|  | 		}, { | ||||||
|  | 			var tmpRes; | ||||||
|  | 			if(res.every({arg char; char.isDecDigit}), {tmpRes = res.asInteger}); | ||||||
|  | 			if(res.contains("."), {tmpRes = res.asFloat}); | ||||||
|  | 			if(tmpRes != nil, {res = tmpRes}); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	res | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | writeResources = {arg path, dict; | ||||||
|  | 	var file, modelItems, resString; | ||||||
|  | 	file = File(path,"w"); | ||||||
|  | 
 | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 
 | ||||||
|  | 	resString = nameSpaces.collect({arg nameSpace; | ||||||
|  | 		var depth = 0, insert = " "; | ||||||
|  | 		if(nameSpace == "music_data", {depth = 3; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "last_changes", {depth = 1; insert = "\n"}); | ||||||
|  | 		if(nameSpace == "order", {depth = 1; insert = "\n"}); | ||||||
|  | 		if((nameSpace == "ref_uid") && (dict[nameSpace] == nil), {dict[nameSpace] = "nil"}); | ||||||
|  | 		"\"" ++ nameSpace ++ "\":" ++ insert ++ stringifyToDepth.value(dict[nameSpace], depth) | ||||||
|  | 	}).join(",\n"); | ||||||
|  | 
 | ||||||
|  | 	resString = "{\n" ++ resString ++ "\n}"; | ||||||
|  | 
 | ||||||
|  | 	file.write(resString); | ||||||
|  | 	file.close; | ||||||
|  | 	resString | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadModelFile = {arg path; loadModelJSON.value(File(path, "r").readAllString.parseJSON)}; | ||||||
|  | 
 | ||||||
|  | loadModelJSON = {arg jsonObject; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS; nS->msgInterpret.value(jsonObject[nS])})); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | setGlobalVars = {arg dict, skipLastXChanges = false; | ||||||
|  | 	var tmpLastXChanges; | ||||||
|  | 	tmpLastXChanges = lastXChanges.deepCopy; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	# seq, lastXChanges, curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 	entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 	ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 	orders, susWeights, orderSize, passagesSize, | ||||||
|  | 	motifEdited, orderEdited = nameSpaces.collect({arg nS; dict[nS]}); | ||||||
|  | 	if(skipLastXChanges, {lastXChanges = tmpLastXChanges}); | ||||||
|  | 	dict | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | globalVarsToDict = { | ||||||
|  | 	var modelItems, dict; | ||||||
|  | 	// order really matters!!!! | ||||||
|  | 	modelItems = [ | ||||||
|  | 		seq, lastXChanges, | ||||||
|  | 		curUID, refUID, orderSeed, durSeed, motifSeed, | ||||||
|  | 		entrancesProbVals, passagesProbVals, exitsProbVals, | ||||||
|  | 		ranges, stepProbsVals, passagesWeights, hdExp, hdInvert, | ||||||
|  | 		orders, susWeights, orderSize, passagesSize, | ||||||
|  | 		motifEdited, orderEdited | ||||||
|  | 	]; | ||||||
|  | 	dict = Dictionary.with(*nameSpaces.collect({arg nS, n; nS->modelItems[n]})); | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerFile = {arg path; | ||||||
|  | 	ledgerPath = path; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	loadLedgerJSON.value(File(ledgerPath, "r").readAllString.parseJSON) | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | loadLedgerJSON = {arg jsonObject; ledger = jsonObject["ledger"]}; | ||||||
|  | 
 | ||||||
|  | saveLedger = {arg ledger, path; | ||||||
|  | 	var file, curResourceDir; | ||||||
|  | 	file = File(path, "w"); | ||||||
|  | 	curResourceDir = resourceDir; | ||||||
|  | 	resourceDir = path.splitext(".").drop(-1).join; | ||||||
|  | 	if(curResourceDir != resourceDir, { | ||||||
|  | 		File.mkdir(resourceDir); | ||||||
|  | 		ledger.do({arg id; | ||||||
|  | 			File.copy(curResourceDir +/+ id, resourceDir +/+ id); | ||||||
|  | 		}); | ||||||
|  | 	}); | ||||||
|  | 	file.write("{\n\"ledger\":\n" ++ stringifyToDepth.value(ledger, 1) ++ "\n}"); | ||||||
|  | 	file.close; | ||||||
|  | }; | ||||||
|  | 
 | ||||||
|  | //------global vars | ||||||
|  | 
 | ||||||
|  | primes = [[2, 1], [3, 2], [5, 4], [7, 4], [11, 8], [13, 8]]; | ||||||
|  | //ranges = [[-2400, 0], [-1200, 1200], [0, 2400], [0, 2400]]; | ||||||
|  | exPath = thisProcess.nowExecutingPath; | ||||||
|  | dir = exPath.dirname; | ||||||
|  | //popSize = 4; | ||||||
|  | dims = primes.size; | ||||||
|  | tuples = genTuples.value(); | ||||||
|  | //refUID = nil; | ||||||
|  | group = Group.new; | ||||||
|  | ~group = group; | ||||||
|  | loadLedgerFile.value(dir +/+ ".." +/+ "resources" +/+ "piece_ledger.json"); | ||||||
|  | resourceDir = (dir +/+ ".." +/+ "resources" +/+ "piece_ledger"); | ||||||
|  | //passagesWeights = [1, 1, 1, 1, 1]; | ||||||
|  | //susWeights = [1, 1, 1]; | ||||||
|  | // order really matters!!!! | ||||||
|  | nameSpaces = [ | ||||||
|  | 	"music_data", "last_changes", "cur_uid", "ref_uid", "order_seed", "dur_seed", "motifs_seed", | ||||||
|  | 	"entrances_probs_vals","passages_probs_vals", "exits_probs_vals", | ||||||
|  | 	"ranges", "step_probs_vals", "passages_weights", "hd_exp", "hd_invert", | ||||||
|  | 	"order", "sus_weights", "order_size", "passages_size", | ||||||
|  | 	"motif_edited", "order_edited" | ||||||
|  | ]; | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | //------OSC funcs | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_ledger, {arg msg, time, addr, port; | ||||||
|  | 	loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \load_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\load_model, {arg msg, time, addr, port; | ||||||
|  | 	var dict; | ||||||
|  | 	dict = loadModelFile.value(msg[1].asString); | ||||||
|  | 	setGlobalVars.value(dict); | ||||||
|  | }, \load_model); | ||||||
|  | 
 | ||||||
|  | OSCdef(\save_ledger, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	ledger = msgInterpret.value(msg[1].asString.parseJSON["ledger"], false).postln; | ||||||
|  | 	//loadLedgerJSON.value(msg[0]) | ||||||
|  | 	saveLedger.value(ledger, msg[2].asString); | ||||||
|  | 	//loadLedgerFile.value(msg[1].asString); | ||||||
|  | }, \save_ledger); | ||||||
|  | 
 | ||||||
|  | OSCdef(\generate, {arg msg, time, addr, port; | ||||||
|  | 	var path, dict, durSeeds, musPath, modelString; | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	path = msg[1].asString; | ||||||
|  | 
 | ||||||
|  | 	dict = loadModelFile.value(path); | ||||||
|  | 	setGlobalVars.value(dict, true); | ||||||
|  | 
 | ||||||
|  | 	popSize = ranges.size; | ||||||
|  | 
 | ||||||
|  | 	//refUID.postln; | ||||||
|  | 
 | ||||||
|  | 	loadLedgerFile.value(ledgerPath); | ||||||
|  | 	if(ledger == nil, {ledger = ["tmp"]}); | ||||||
|  | 	if(ledger.last != "tmp", {ledger = ledger.add("tmp")}); | ||||||
|  | 
 | ||||||
|  | 	if(refUID == nil, {lastXChanges = [initVoices.value().deepCopy]}); | ||||||
|  | 	if((refUID != nil) && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		lastXChanges = msgInterpret.value(file.readAllString.parseJSON["last_changes"]); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	refUID.postln; | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	durSeeds = seedFunc.value({3.collect({rrand(100000, 999999)})}, durSeed).value.postln; | ||||||
|  | 	entrancesDurFunc = genDurFunc.valueArray(entrancesProbVals[..4] ++ [entrancesProbVals[5..]] ++ [durSeeds[0]]); | ||||||
|  | 	passagesDurFunc = genDurFunc.valueArray(passagesProbVals[..4] ++ [passagesProbVals[5..]] ++ [durSeeds[1]]); | ||||||
|  | 	exitsDurFunc = genDurFunc.valueArray(exitsProbVals[..4] ++ [exitsProbVals[5..]] ++ [durSeeds[2]]); | ||||||
|  | 
 | ||||||
|  | 	if(orders == nil, { | ||||||
|  | 		orders = seedFunc.value(genOrders, orderSeed).valueArray(orderSize ++ passagesSize); | ||||||
|  | 		//addr.sendMsg("/order", stringifyToDepth.value(orders, 1)); | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	stepFunc = genStepFunc.valueArray(stepProbsVals[..1] ++ [stepProbsVals[2..]] ++ [motifSeed]); | ||||||
|  | 	seq = seedFunc.value(genMotif, motifSeed).value; | ||||||
|  | 
 | ||||||
|  | 	lastXChanges.collect({arg item; item.postln}); | ||||||
|  | 
 | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	modelString = writeResources.value(path, dict); | ||||||
|  | 
 | ||||||
|  | 	//addr.sendMsg("/generated", musPath, stringifyToDepth.value(seq, 3)); | ||||||
|  | 	//~seq = seq; | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/generated", path, modelString, ledgerPath); | ||||||
|  | }, \generate); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\commit, {arg msg, time, addr, port; | ||||||
|  | 	var musicData, musicChanged, dict, newLedger, modelPath, musString, musFile, test1, test2, lastCurUID, commitType, commitPos, equalityLedger; | ||||||
|  | 	//msg.postln; | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	test1 = msg[1].asString.parseJSON; | ||||||
|  | 	test2 = (dir +/+ ".." +/+ "resources/tmp/tmp_music" ++ ".json").standardizePath.parseJSONFile; | ||||||
|  | 	msgInterpret.value(test1["music"])[0][0][0][1].class.postln; | ||||||
|  | 	msgInterpret.value(test2["music_data"])[0][0][0][1].class.postln; | ||||||
|  | 	(test1["music"] == test2["music_data"]).postln; | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	musicData = loadModelJSON.value(msg[1].asString.parseJSON)["music_data"].postln; | ||||||
|  | 	musicChanged = (musicData != seq).postln; | ||||||
|  | 	commitType = msg[2].asString; | ||||||
|  | 	commitPos = msg[3].postln.asInteger; | ||||||
|  | 
 | ||||||
|  | 	lastCurUID = curUID.deepCopy; | ||||||
|  | 	curUID = genUID.value; | ||||||
|  | 
 | ||||||
|  | 	File.mkdir((resourceDir +/+ curUID).standardizePath); | ||||||
|  | 	File.copy(exPath, (resourceDir +/+ curUID +/+ curUID ++ "_code" ++ ".scd").standardizePath); | ||||||
|  | 
 | ||||||
|  | 	modelPath = (resourceDir +/+ curUID +/+ curUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 	dict = globalVarsToDict.value; | ||||||
|  | 	if(musicChanged, { | ||||||
|  | 		seq = musicData; | ||||||
|  | 		dict["music_data"] = seq; | ||||||
|  | 		dict["motif_edited"] = "true" | ||||||
|  | 	}); | ||||||
|  | 	dict["cur_uid"] = curUID; | ||||||
|  | 
 | ||||||
|  | 	writeResources.value(modelPath, dict); | ||||||
|  | 
 | ||||||
|  | 	File.delete(ledgerPath ++ "_bak"); | ||||||
|  | 	File.copy(ledgerPath, ledgerPath ++ "_bak"); | ||||||
|  | 	File.delete(ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	/* | ||||||
|  | 	if(commitType == "add", { | ||||||
|  | 		if(lastCurUID == "tmp", { | ||||||
|  | 			ledger = ledger.drop(-1).add(curUID); | ||||||
|  | 		}, { | ||||||
|  | 			ledger = ledger.add(curUID); | ||||||
|  | 		}) | ||||||
|  | 	}); | ||||||
|  | 	*/ | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "add", {ledger = ledger.add(curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "insert", {ledger = ledger.insert(commitPos + 1, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	if(commitType == "replace", {ledger = ledger.put(commitPos, curUID)}); | ||||||
|  | 
 | ||||||
|  | 	equalityLedger = ledger.collect({arg item; item.asSymbol}); | ||||||
|  | 	if(equalityLedger.includes(\tmp).postln, {ledger.removeAt(equalityLedger.indexOf(\tmp).postln)}); | ||||||
|  | 
 | ||||||
|  | 	ledger.postln; | ||||||
|  | 
 | ||||||
|  | 	saveLedger.value(ledger, ledgerPath); | ||||||
|  | 
 | ||||||
|  | 	addr.sendMsg("/committed", curUID, ledgerPath); | ||||||
|  | 	//refUID = curUID; | ||||||
|  | 
 | ||||||
|  | }, \commit); | ||||||
|  | 
 | ||||||
|  | OSCdef(\transport, {arg msg, time, addr, port; | ||||||
|  | 	msg.postln; | ||||||
|  | 	if(msg[1] == 0, { | ||||||
|  | 		group.set(\release, 2); | ||||||
|  | 		group.set(\gate, 0); | ||||||
|  | 		player.stop; | ||||||
|  | 	}, { | ||||||
|  | 		// the cued sequence can now be read from file, so this can be cleaned up | ||||||
|  | 		var cSize, patterns, pSeq, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 		if(msg[1] == 1, { | ||||||
|  | 			pSeq = []; | ||||||
|  | 			cuedSeek = (seq != nil); | ||||||
|  | 			indexStart = msg[2].asInteger; | ||||||
|  | 			indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 			//ledger.postln; | ||||||
|  | 			if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 				ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 					var path, file; | ||||||
|  | 					path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					pSeq = pSeq.add([msgInterpret.value(file.readAllString.postln.parseJSON["music_data"]), path, indexStart + index, uid]); | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 			}); | ||||||
|  | 			if(cuedSeek, { | ||||||
|  | 				var path, file; | ||||||
|  | 				path = (resourceDir +/+ "tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 				file.close; | ||||||
|  | 			}); | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr); | ||||||
|  | 		}, { | ||||||
|  | 			pSeq = [loadModelJSON.value(msg[2].asString.parseJSON)["music_data"].postln]; | ||||||
|  | 			patterns = genPatterns.value(pSeq, addr, true); | ||||||
|  | 		}); | ||||||
|  | 		player = Pfset(pattern: patterns, cleanupFunc: { | ||||||
|  | 			addr.sendMsg("/transport", 0); | ||||||
|  | 			addr.sendMsg("/one_shot", 0); | ||||||
|  | 		}); | ||||||
|  | 		player = player.play | ||||||
|  | 	}); | ||||||
|  | }, \transport); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_motif, {arg msg, time, addr, port; | ||||||
|  | 	var tSeq, refChord, refUID; | ||||||
|  | 
 | ||||||
|  | 	msg.postln; | ||||||
|  | 
 | ||||||
|  | 	tSeq = [loadModelJSON.value(msg[1].asString.parseJSON)["music_data"]]; | ||||||
|  | 	refUID = msg[2].asString.postln; | ||||||
|  | 
 | ||||||
|  | 	if((refUID != "nil") && (refUID != "tmp"), { | ||||||
|  | 		var file; | ||||||
|  | 		file = File((resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath, "r"); | ||||||
|  | 		refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 		file.close; | ||||||
|  | 	}, { | ||||||
|  | 		refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | 	~transcribe.value(tSeq, refChord, (dir +/+ ".." +/+ "lilypond" +/+ "includes").standardizePath, addr, "/transcribe_motif"); | ||||||
|  | }, \transcribe_motif); | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | OSCdef(\transcribe_all, {arg msg, time, addr, port; | ||||||
|  | 	var cSize, patterns, cuedSeek, indexStart, indexEnd, tmpLedger; | ||||||
|  | 	if(true, { | ||||||
|  | 		cuedSeek = (seq != nil); | ||||||
|  | 		indexStart = msg[1].asInteger; | ||||||
|  | 		indexEnd = ledger.size - if(cuedSeek, {2}, {1}); | ||||||
|  | 
 | ||||||
|  | 		//tmp for testing transcription | ||||||
|  | 		indexEnd = (indexStart+5); | ||||||
|  | 
 | ||||||
|  | 		//ledger.postln; | ||||||
|  | 		if(((indexStart == (ledger.size - 1)) && cuedSeek).not, { | ||||||
|  | 			var lilyPartLedgerFiles; | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles = 4.collect({arg p; | ||||||
|  | 				File((dir +/+ ".." +/+ "lilypond" +/+ "includes" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly").standardizePath, "w"); | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			ledger[indexStart..indexEnd].do({arg uid, index; | ||||||
|  | 				var path, file, fileString, tSeq, refUID, refChord; | ||||||
|  | 				path = (resourceDir +/+ uid +/+ uid ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 				file = File(path, "r"); | ||||||
|  | 				fileString = file.readAllString; | ||||||
|  | 				tSeq = msgInterpret.value(fileString.parseJSON["music_data"]); | ||||||
|  | 				refUID = msgInterpret.value(fileString.parseJSON["ref_uid"]); | ||||||
|  | 				file.close; | ||||||
|  | 
 | ||||||
|  | 				//uid.postln; | ||||||
|  | 				//(refUID == "nil").postln; | ||||||
|  | 
 | ||||||
|  | 				refChord = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]; | ||||||
|  | 
 | ||||||
|  | 				if(refUID != "nil", { | ||||||
|  | 					path = (resourceDir +/+ refUID +/+ refUID ++ "_mus_model" ++ ".json").standardizePath; | ||||||
|  | 					file = File(path, "r"); | ||||||
|  | 					refChord = msgInterpret.value(file.readAllString.parseJSON["last_changes"]).last; | ||||||
|  | 					file.close; | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				if(index != indexEnd, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath); | ||||||
|  | 				}, { | ||||||
|  | 					~transcribe.value(tSeq, refChord, (resourceDir +/+ uid +/+ "lilypond").standardizePath, addr, "/transcribe_all"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 				lilyPartLedgerFiles.do({arg f, p; | ||||||
|  | 					f.write("\\include \"" ++ resourceDir +/+ uid +/+ "lilypond" +/+ "part_" ++ ["IV", "III", "II", "I"][p] ++ ".ly\"\n"); | ||||||
|  | 				}); | ||||||
|  | 
 | ||||||
|  | 			}); | ||||||
|  | 
 | ||||||
|  | 			lilyPartLedgerFiles.do({arg f; | ||||||
|  | 				f.close | ||||||
|  | 			}); | ||||||
|  | 		}); | ||||||
|  | 		/* | ||||||
|  | 		if(cuedSeek, { | ||||||
|  | 			var path, file; | ||||||
|  | 			path = (dir +/+ ".." +/+ "resources/tmp/tmp_mus_model" ++ ".json").standardizePath; | ||||||
|  | 			file = File(path, "r"); | ||||||
|  | 			pSeq = pSeq.add([msgInterpret.value(file.readAllString.parseJSON["music_data"]), path, ledger.size - 1, "tmp"]); | ||||||
|  | 			file.close; | ||||||
|  | 		}); | ||||||
|  | 		*/ | ||||||
|  | 	}, { | ||||||
|  | 
 | ||||||
|  | 	}); | ||||||
|  | 
 | ||||||
|  | }, \transcribe_all); | ||||||
|  | 
 | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @ -0,0 +1,53 @@ | |||||||
|  | { | ||||||
|  | "music_data": | ||||||
|  | [ | ||||||
|  |  [ | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 1, 1 ] ], 1.75 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 0, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], 1.5 ] | ||||||
|  |   ], | ||||||
|  |   [ | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ 1, 0, 1, -2, 0, 1 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 0, 1 ], [ "Rest" ] ], 1.5 ], | ||||||
|  |     [ [ [ "Rest" ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ "Rest" ] ], 0 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 0.75 ], | ||||||
|  |     [ [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ "Rest" ] ], 1.625 ], | ||||||
|  |     [ [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ "Rest" ], [ "Rest" ] ], 1.125 ], | ||||||
|  |     [ [ [ -1, -1, 1, -1, 2, 0 ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 0.625 ], | ||||||
|  |     [ [ [ "Rest" ], [ "Rest" ], [ "Rest" ], [ "Rest" ] ], 2.375 ] | ||||||
|  |   ] | ||||||
|  |  ] | ||||||
|  | ], | ||||||
|  | "last_changes": | ||||||
|  | [ | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 0, 1 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 1, -1, 1, -3, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 1, -1, 2, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ 0, -1, 1, -2, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ], | ||||||
|  |   [ [ -1, -1, 1, -1, 2, 0 ], [ 1, -1, 1, -2, 2, 0 ], [ 2, -2, 1, -2, 2, 0 ], [ 1, 0, 1, -2, 1, 1 ] ] | ||||||
|  | ], | ||||||
|  | "cur_uid": "7edbdceb", | ||||||
|  | "ref_uid": "6d635e88", | ||||||
|  | "order_seed": 516056, | ||||||
|  | "dur_seed": 358555, | ||||||
|  | "motifs_seed": 481455, | ||||||
|  | "entrances_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "passages_probs_vals": [ 0.29, 0, 1.1111111111111, 0.65934065934066, 1.37, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "exits_probs_vals": [ 0.18, 0.28, 1.4285714285714, 0.47, 1.62, 0, 0.5, 0.5, 0.5, 1, 0.5 ], | ||||||
|  | "ranges": [ [ -2411.1455108359, -850.77399380805 ], [ -1872, 450 ], [ -479, 1304.0247678019 ], [ -368, 1173.9938080495 ] ], | ||||||
|  | "step_probs_vals": [ 0, 1200, 0, 0, 0.082304526748971, 0.99431818181818, 0.33950617283951, 0, 0.72839506172839, 0, 1, 0 ], | ||||||
|  | "passages_weights": [ 0.35, 0.42, 0.75, 0.9, 0.93 ], | ||||||
|  | "hd_exp": 2, | ||||||
|  | "hd_invert": 0, | ||||||
|  | "order": | ||||||
|  | [ | ||||||
|  |   [ [ 3 ], [ 2 ], [ 0, 1 ] ], | ||||||
|  |   [ [ 1 ], [ 2, 0, 2, 0 ], [ 3 ] ] | ||||||
|  | ], | ||||||
|  | "sus_weights": [ 0.41, 0, 0 ], | ||||||
|  | "order_size": [ 2, 6 ], | ||||||
|  | "passages_size": [ 0, 5 ], | ||||||
|  | "motif_edited": "false", | ||||||
|  | "order_edited": "false" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { ais'1^\markup { \pad-markup #0.2 "+41"} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais'2 ~ ais'8[ r8] r4 } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r2.  r8[ f'8^\markup { \pad-markup #0.2 "-11"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "I"\normal-size-super " 11↓" }}] ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'4 ~ f'8[ e'8^\markup { \pad-markup #0.2 "+36"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 5↑" }}] ~ e'4 f'4^\markup { \pad-markup #0.2 "+47"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "IV"\normal-size-super " 3↓" }} ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { f'2. ~ f'8.[ r16] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r2  r8[ c'8^\markup { \pad-markup #0.2 "+49"}] ~ c'4 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'1 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c'2 r2  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||
| @ -0,0 +1,16 @@ | |||||||
|  | {   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r4 r8[ c8^\markup { \pad-markup #0.2 "+49"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 1↑" }}] ~ c2 ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { c8[ ais,8^\markup { \pad-markup #0.2 "+18"}_\markup {  \lower #3 \pad-markup #0.2 \concat{ "III"\normal-size-super " 7↑" }}] ~ ais,2. ~ } | ||||||
|  |  \bar "|"   | ||||||
|  |  { ais,2. ~ ais,16[ r8.] } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1  } | ||||||
|  |  \bar "|"   | ||||||
|  |  { r1 } | ||||||
|  | \bar "||" | ||||||
|  | } | ||||||