Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
LilDrippyMyFnf
GitHub Repository: LilDrippyMyFnf/FNF-BitEnigne
Path: blob/master/Non-Haxe-Files/Syncer.js
2156 views
1
/**
2
* Echo Nest integration - upload file to the Echo nest directly from the browser
3
*
4
*/
5
6
/*global require, define, window, console, document, File */
7
8
define(["krusovice/thirdparty/remix", "krusovice/thirdparty/sparkmd5"], function(Nest, SparkMD5) {
9
10
"use strict";
11
12
// Cache expensive HTTP upload result operations in local storage
/**
 * Look up a previously cached analysis result by file hash.
 *
 * @param {String} hash MD5 hash of the audio file
 * @return {Object|null} Parsed cached result, or null on miss or corrupt entry
 */
function getCachedResult(hash) {
    var json = window.localStorage[hash];
    if(json) {
        try {
            return JSON.parse(json);
        } catch(e) {
            // BUG FIX: a corrupt cache entry used to throw out of this
            // function and crash the caller; treat it as a cache miss.
            console.error("Corrupt cached analysis entry for hash " + hash, e);
            return null;
        }
    }

    return null;
}
21
22
/**
 * Persist a serialized analysis result in the browser's local storage,
 * keyed by the file's MD5 hash.
 *
 * @param {String} hash MD5 hash of the audio file
 * @param {String} result JSON-serialized analysis payload
 */
function storeCachedResult(hash, result) {
    var storage = window.localStorage;
    storage[hash] = result;
}
25
26
/**
 * Construct an Echo Nest API client for the given key.
 *
 * @param {String} apiKey Echo Nest developer API key
 * @return {Object} Configured Nest client instance
 */
function createNest(apiKey) {
    return new Nest(apiKey);
}
30
31
/**
 * Convert Echo Nest data to Krusovice internal format in place.
 *
 * Bar and beat timestamps arrive in seconds; rescale them to
 * milliseconds so later look-ups need no conversion.
 *
 * @param {Object} data Raw Echo Nest payload with data.analysis.bars and data.analysis.beats
 * @return {Object} The same data object, mutated in place
 */
function preprocessData(data) {

    // Rescale every event's start/duration from seconds to ms
    function secondsToMilliseconds(events) {
        events.forEach(function(event) {
            event.start *= 1000; // ms
            event.duration *= 1000;
        });
    }

    secondsToMilliseconds(data.analysis.bars);
    secondsToMilliseconds(data.analysis.beats);

    return data;
}
54
55
/**
 * Calculate file hash using SparkMD5 lib.
 *
 * The file is read in 2 MB chunks so hashing large audio files keeps
 * memory bounded; chunks are fed to SparkMD5 incrementally.
 *
 * @param {Object} file window.File instance
 * @param {Function} done Callback done(hash) when hashing is finished
 */
function calculateHash(file, done) {

    var fileReader = new FileReader(),
        blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 2097152, // read in chunks of 2MB
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        spark = new SparkMD5();

    fileReader.onload = function(e) {
        console.log("read chunk nr", currentChunk + 1, "of", chunks);
        spark.appendBinary(e.target.result); // append binary string
        currentChunk++;

        if (currentChunk < chunks) {
            loadNext();
        } else {
            console.log("finished hashing");
            done(spark.end());
        }
    };

    // BUG FIX: without an error handler a failed chunk read (e.g. the
    // file disappearing mid-read) left the done callback silently
    // un-fired forever, with no diagnostic anywhere.
    fileReader.onerror = function() {
        console.error("Failed to read file chunk while hashing", fileReader.error);
    };

    function loadNext() {
        var start = currentChunk * chunkSize,
            end = start + chunkSize >= file.size ? file.size : start + chunkSize;

        fileReader.readAsBinaryString(blobSlice.call(file, start, end));
    }

    loadNext();
}
94
95
96
/**
 * Ask track analysis info from Echo Nest HTTP API.
 *
 * Results are cached in localStorage keyed by the file's MD5 hash, so
 * re-analyzing the same file skips the HTTP round-trip entirely.
 *
 * @param {String} apiKey Echo Nest developer API key
 * @param {Object} file window.File instance to analyze
 * @param {Function} done Called with successful track analysis done(data) where data.analysis contains bars, beats
 * @param {Function} failed Called with unsuccessful track analysis (optional)
 */
function analyzeFile (apiKey, file, done, failed) {

    var hash = null;

    console.log('analyzing file', file);

    var nest = createNest(apiKey);

    function ready(data) {
        return done(preprocessData(data));
    }

    // Report failure without crashing when the caller gave no handler
    function fail() {
        if(failed) {
            failed();
        }
    }

    // Call Echo Nest HTTP API with file payload
    function postToAPI() {
        console.log("Sending file to analyze");

        nest.analyzeFile(file, nest.guessType(file), {
            onload: function (result) {

                var response = result.response;

                var data = result;

                console.log("Got response");
                console.log(response);

                if (!(response.track && response.track.audio_summary)) {
                    // BUG FIX: this case used to fall through silently —
                    // neither done nor failed was ever called.
                    console.error("Echo Nest response is missing track audio summary");
                    fail();
                    return;
                }

                console.log("Loading analysis URL:" + response.track.audio_summary.analysis_url);

                if(!response.track.audio_summary.analysis_url) {
                    console.error("Echonest does not like us and didn't produce track analysis URL");
                    // BUG FIX: previously the early return only happened when a
                    // failed callback was supplied, so execution could continue
                    // into loadAnalysis(undefined).
                    fail();
                    return;
                }

                nest.loadAnalysis(response.track.audio_summary.analysis_url, {
                    onload: function (result) {
                        data.analysis = result;
                        // Cache the raw payload before preprocessData() mutates
                        // it; the cached path re-runs preprocessData on load.
                        storeCachedResult(hash, JSON.stringify(data));
                        ready(data);
                    }
                });
            }
        });
    }

    // We now know if we are dealing with a new file or can use cached result
    function gotHash(xhash) {
        hash = xhash;
        var cached = getCachedResult(hash);
        if(cached) {
            ready(cached);
        } else {
            postToAPI();
        }
    }

    calculateHash(file, gotHash);
}
170
171
172
//
173
// Module API
174
//
175
176
return {
177
analyzeFile : analyzeFile
178
};
179
180
});
181
182