Path: blob/master/Non-Haxe-Files/Syncer.js
/**
 * Echo Nest integration - upload a file to the Echo Nest directly from the browser
 *
 */

/*global require, define, window, console, document, File, FileReader */

define(["krusovice/thirdparty/remix", "krusovice/thirdparty/sparkmd5"], function(Nest, SparkMD5) {

"use strict";

// Cache expensive HTTP upload result operations in local storage
function getCachedResult(hash) {
    var json = window.localStorage[hash];
    if(json) {
        return JSON.parse(json);
    }

    return null;
}

function storeCachedResult(hash, result) {
    window.localStorage[hash] = result;
}

function createNest(apiKey) {
    var nest = new Nest(apiKey);
    return nest;
}

/**
 * Convert Echo Nest data to Krusovice internal format in place.
 *
 * We optimize some timestamps for quicker look-up.
 *
 * @param {Object} data Raw Echo Nest data
 */
function preprocessData(data) {
    var i;
    var bars = data.analysis.bars, beats = data.analysis.beats;

    for(i = 0; i < bars.length; i++) {
        bars[i].start *= 1000; // ms
        bars[i].duration *= 1000;
    }

    for(i = 0; i < beats.length; i++) {
        beats[i].start *= 1000; // ms
        beats[i].duration *= 1000;
    }

    return data;
}

/**
 * Calculate a file hash using the SparkMD5 library.
 *
 * @param {Object} file window.File instance
 * @param {Function} done Callback done(hash) when hashing is finished
 */
function calculateHash(file, done) {

    var fileReader = new FileReader(),
        blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 2097152, // read in chunks of 2MB
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        spark = new SparkMD5();

    fileReader.onload = function(e) {
        console.log("read chunk nr", currentChunk + 1, "of", chunks);
        spark.appendBinary(e.target.result); // append binary string
        currentChunk++;

        if (currentChunk < chunks) {
            loadNext();
        } else {
            console.log("finished hashing");
            var hash = spark.end();
            done(hash);
        }
    };

    function loadNext() {
        var start = currentChunk * chunkSize,
            end = start + chunkSize >= file.size ? file.size : start + chunkSize;

        fileReader.readAsBinaryString(blobSlice.call(file, start, end));
    }

    loadNext();
}


/**
 * Ask track analysis info from the Echo Nest HTTP API.
 *
 * Store the result in the localStorage cache, keyed by the file's MD5 hash.
 *
 * @param {String} apiKey Echo Nest API key
 * @param {Object} file window.File instance to analyze
 * @param {Function} done Called with successful track analysis done(data) where data.analysis contains bars, beats
 * @param {Function} failed Called when track analysis fails
 */
function analyzeFile(apiKey, file, done, failed) {

    var hash = null;

    console.log("analyzing file", file);

    var nest = createNest(apiKey);

    function ready(data) {
        return done(preprocessData(data));
    }

    // Call Echo Nest HTTP API with file payload
    function postToAPI() {
        console.log("Sending file to analyze");

        nest.analyzeFile(file, nest.guessType(file), {
            onload: function (result) {

                var response = result.response;

                var data = result;

                console.log("Got response");
                console.log(response);

                if (response.track && response.track.audio_summary) {

                    if(!response.track.audio_summary.analysis_url) {
                        console.error("Echo Nest did not produce a track analysis URL");
                        if(failed) {
                            failed();
                        }
                        return;
                    }

                    console.log("Loading analysis URL: " + response.track.audio_summary.analysis_url);

                    nest.loadAnalysis(response.track.audio_summary.analysis_url, {
                        onload: function (analysis) {
                            data.analysis = analysis;
                            storeCachedResult(hash, JSON.stringify(data));
                            ready(data);
                        }
                    });
                } else if(failed) {
                    failed();
                }
            }
        });
    }

    // We now know if we are dealing with a new file or can use a cached result
    function gotHash(xhash) {
        hash = xhash;
        var cached = getCachedResult(hash);
        if(cached) {
            ready(cached);
        } else {
            postToAPI();
        }
    }

    calculateHash(file, gotHash);

}


//
// Module API
//

return {
    analyzeFile : analyzeFile
};

});
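
// Usage sketch (illustrative only, not part of the module): how a caller might wire
// analyzeFile() to a file input through RequireJS. The module path "krusovice/syncer",
// the API key placeholder and the element id are assumptions, not values taken from
// this file.
//
// require(["krusovice/syncer"], function(syncer) {
//
//     var input = document.getElementById("audio-file");
//
//     input.addEventListener("change", function() {
//         syncer.analyzeFile("YOUR_ECHO_NEST_API_KEY", input.files[0],
//             function(data) {
//                 // data.analysis.bars and data.analysis.beats now carry
//                 // start/duration values converted to milliseconds
//                 console.log("First beat at", data.analysis.beats[0].start, "ms");
//             },
//             function() {
//                 console.error("Echo Nest analysis failed");
//             });
//     });
// });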