//>>built
define("dojox/data/AndOrReadStore", ["dojo/_base/kernel", "dojo/_base/declare", "dojo/_base/lang", "dojo/data/util/filter", "dojo/data/util/simpleFetch",
	"dojo/_base/array", "dojo/date/stamp", "dojo/_base/json", "dojo/_base/window", "dojo/_base/xhr"],
	function(kernel, declare, lang, filterUtil, simpleFetch, array, dateStamp, json, winUtil, xhr) {

var AndOrReadStore = declare("dojox.data.AndOrReadStore", null, {
	// summary:
	//		AndOrReadStore uses ItemFileReadStore as a base, modifying only the query (_fetchItems) section.
	//		Supports queries of the form: query:"id:1* OR dept:'Sales Department' || (id:2* && NOT dept:S*)"
	//		Includes legacy/widget support via:
	//		query:{complexQuery:"id:1* OR dept:'Sales Department' || (id:2* && NOT dept:S*)"}
	//		The ItemFileReadStore implements the dojo.data.api.Read API and reads
	//		data from JSON files that have contents in this format --
	//		{ items: [
	//			{ name:'Kermit', color:'green', age:12, friends:['Gonzo', {_reference:{name:'Fozzie Bear'}}]},
	//			{ name:'Fozzie Bear', wears:['hat', 'tie']},
	//			{ name:'Miss Piggy', pets:'Foo-Foo'}
	//		]}
	//		Note that it can also contain an 'identifier' property that specifies which attribute on the items
	//		in the array of items acts as the unique identifier for that item.
	//
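	// example:
	//		An illustrative data file (the 'identifier' and 'label' values here are
	//		hypothetical choices, not required by this module) would look like:
	//	|	{ identifier: 'name',
	//	|	  label: 'name',
	//	|	  items: [
	//	|		{ name:'Kermit', color:'green' },
	//	|		{ name:'Fozzie Bear', wears:['hat', 'tie'] }
	//	|	]}
	//		A minimal usage sketch, assuming a hypothetical "muppets.json" file in that format:
	//	|	require(["dojox/data/AndOrReadStore"], function(AndOrReadStore){
	//	|		var store = new AndOrReadStore({url: "muppets.json"});
	//	|		store.fetch({
	//	|			query: "name:K* OR wears:'tie'",
	//	|			onComplete: function(items){ console.log(items.length); },
	//	|			onError: function(err){ console.error(err); }
	//	|		});
	//	|	});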
	constructor: function(/* Object */ keywordParameters){
		// summary: constructor
		// keywordParameters: {url: String}
		// keywordParameters: {data: jsonObject}
		// keywordParameters: {typeMap: object}
		//		The structure of the typeMap object is as follows:
		//		{
		//			type0: function || object,
		//			type1: function || object,
		//			...
		//			typeN: function || object
		//		}
		//		Where if it is a function, it is assumed to be an object constructor that takes the
		//		value of _value as the initialization parameters. If it is an object, then it is assumed
		//		to be an object of general form:
		//		{
		//			type: function, //constructor.
		//			deserialize: function(value) //The function that parses the value and constructs the object defined by type appropriately.
		//		}

		this._arrayOfAllItems = [];
		this._arrayOfTopLevelItems = [];
		this._loadFinished = false;
		this._jsonFileUrl = keywordParameters.url;
		this._ccUrl = keywordParameters.url;
		this.url = keywordParameters.url;
		this._jsonData = keywordParameters.data;
		this.data = null;
		this._datatypeMap = keywordParameters.typeMap || {};
		if(!this._datatypeMap['Date']){
			//If no default mapping for dates, then set this as default.
			//We use the dojo.date.stamp here because the ISO format is the 'dojo way'
			//of generically representing dates.
			this._datatypeMap['Date'] = {
				type: Date,
				deserialize: function(value){
					return dateStamp.fromISOString(value);
				}
			};
		}
		this._features = {'dojo.data.api.Read':true, 'dojo.data.api.Identity':true};
		this._itemsByIdentity = null;
		this._storeRefPropName = "_S"; // Default name for the store reference to attach to every item.
		this._itemNumPropName = "_0"; // Default item-number property for isItem to attach to every item.
		this._rootItemPropName = "_RI"; // Default property name used to flag root items.
		this._reverseRefMap = "_RRM"; // Default attribute for constructing a reverse reference map for use with reference integrity
		this._loadInProgress = false; //Track the initial load to prevent dueling loads of the dataset.
		this._queuedFetches = [];

		if(keywordParameters.urlPreventCache !== undefined){
			this.urlPreventCache = keywordParameters.urlPreventCache?true:false;
		}
		if(keywordParameters.hierarchical !== undefined){
			this.hierarchical = keywordParameters.hierarchical?true:false;
		}
		if(keywordParameters.clearOnClose){
			this.clearOnClose = true;
		}
	},

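	// example:
	//		A sketch of typical construction, from a URL or from in-memory data
	//		(the file name here is an illustrative assumption):
	//	|	var storeFromUrl = new AndOrReadStore({url: "readings.json", urlPreventCache: true});
	//	|	var storeFromData = new AndOrReadStore({
	//	|		data: {identifier: "id", items: [{id: 1, name: "first"}, {id: 2, name: "second"}]}
	//	|	});
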
	url: "",	// use "" rather than undefined for the benefit of the parser (#3539)

	//Internal var, crossCheckUrl. Used so that setting either url or _jsonFileUrl can still trigger a reload
	//when clearOnClose and close() are used.
	_ccUrl: "",

	data: null,	//Make this parser-settable.

	typeMap: null, //Make this parser-settable.

	//Parameter to allow users to specify whether a close call should force a reload or not.
	//By default, it retains the old behavior of not clearing if close is called. But
	//if set true, the store will be reset to its default state. Note that by doing this,
	//all item handles will become invalid and a new fetch must be issued.
	clearOnClose: false,

	//Parameter to allow specifying if preventCache should be passed to the xhrGet call or not when loading data from a url.
	//Note this does not mean the store calls the server on each fetch, only that the data load has preventCache set as an option.
	//Added for tracker: #6072
	urlPreventCache: false,

	//Parameter to indicate whether data from the url should be processed as hierarchical
	//(data items can contain other data items in js form). Default is true
	//for backwards compatibility. False means only root items are processed
	//as items; all child objects, other than type-mapped objects and those in
	//the specific reference format, are left as plain JS data objects.
	hierarchical: true,

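	// example:
	//		A sketch of a store configured to treat nested objects as plain values and
	//		to reload on close (the file name is an illustrative assumption):
	//	|	var flatStore = new AndOrReadStore({
	//	|		url: "catalog.json",
	//	|		hierarchical: false,
	//	|		clearOnClose: true
	//	|	});
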
	_assertIsItem: function(/* item */ item){
		// summary:
		//		This function tests whether the item passed in is indeed an item in the store.
		// item:
		//		The item to test for being contained by the store.
		if(!this.isItem(item)){
			throw new Error("dojox.data.AndOrReadStore: Invalid item argument.");
		}
	},

	_assertIsAttribute: function(/* attribute-name-string */ attribute){
		// summary:
		//		This function tests whether the argument passed in is a valid attribute name for the store.
		// attribute:
		//		The attribute to test for being contained by the store.
		if(typeof attribute !== "string"){
			throw new Error("dojox.data.AndOrReadStore: Invalid attribute argument.");
		}
	},

	getValue: function(	/* item */ item,
						/* attribute-name-string */ attribute,
						/* value? */ defaultValue){
		// summary:
		//		See dojo.data.api.Read.getValue()
		var values = this.getValues(item, attribute);
		return (values.length > 0)?values[0]:defaultValue; // mixed
	},

	getValues: function(/* item */ item,
						/* attribute-name-string */ attribute){
		// summary:
		//		See dojo.data.api.Read.getValues()

		this._assertIsItem(item);
		this._assertIsAttribute(attribute);
		var arr = item[attribute] || [];
		// Clone it before returning. refs: #10474
		return arr.slice(0, arr.length); // Array
	},

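	// example:
	//		A usage sketch, assuming 'store' and 'item' come from a prior fetch()
	//		(the attribute names are illustrative):
	//	|	var name = store.getValue(item, "name", "unknown"); // first value, or the default
	//	|	var friends = store.getValues(item, "friends");     // always a (copied) Array
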
	getAttributes: function(/* item */ item){
		// summary:
		//		See dojo.data.api.Read.getAttributes()
		this._assertIsItem(item);
		var attributes = [];
		for(var key in item){
			// Save off only the real item attributes, not the special id marks for O(1) isItem.
			if((key !== this._storeRefPropName) && (key !== this._itemNumPropName) && (key !== this._rootItemPropName) && (key !== this._reverseRefMap)){
				attributes.push(key);
			}
		}
		return attributes; // Array
	},

	hasAttribute: function(	/* item */ item,
							/* attribute-name-string */ attribute){
		// summary:
		//		See dojo.data.api.Read.hasAttribute()
		this._assertIsItem(item);
		this._assertIsAttribute(attribute);
		return (attribute in item);
	},

	containsValue: function(/* item */ item,
							/* attribute-name-string */ attribute,
							/* anything */ value){
		// summary:
		//		See dojo.data.api.Read.containsValue()
		var regexp = undefined;
		if(typeof value === "string"){
			regexp = filterUtil.patternToRegExp(value, false);
		}
		return this._containsValue(item, attribute, value, regexp); //boolean.
	},

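	// example:
	//		String values are treated as wildcard patterns, so, assuming 'store' and
	//		'item' from a prior fetch() (the attribute name is illustrative):
	//	|	store.containsValue(item, "name", "Kerm*"); // true if any 'name' value matches the pattern
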
	_containsValue: function(	/* item */ item,
								/* attribute-name-string */ attribute,
								/* anything */ value,
								/* RegExp?*/ regexp){
		// summary:
		//		Internal function for looking at the values contained by the item.
		// description:
		//		Internal function for looking at the values contained by the item. This
		//		function allows for denoting if the comparison should be case sensitive for
		//		strings or not (for handling filtering cases where string case should not matter)
		//
		// item:
		//		The data item to examine for attribute values.
		// attribute:
		//		The attribute to inspect.
		// value:
		//		The value to match.
		// regexp:
		//		Optional regular expression generated off value if value was of string type to handle wildcarding.
		//		If present and attribute values are string, then it can be used for comparison instead of 'value'
		return array.some(this.getValues(item, attribute), function(possibleValue){
			if(possibleValue !== null && !lang.isObject(possibleValue) && regexp){
				if(possibleValue.toString().match(regexp)){
					return true; // Boolean
				}
			}else if(value === possibleValue){
				return true; // Boolean
			}else{
				return false;
			}
		});
	},

	isItem: function(/* anything */ something){
		// summary:
		//		See dojo.data.api.Read.isItem()
		if(something && something[this._storeRefPropName] === this){
			if(this._arrayOfAllItems[something[this._itemNumPropName]] === something){
				return true;
			}
		}
		return false; // Boolean
	},

	isItemLoaded: function(/* anything */ something){
		// summary:
		//		See dojo.data.api.Read.isItemLoaded()
		return this.isItem(something); //boolean
	},

	loadItem: function(/* object */ keywordArgs){
		// summary:
		//		See dojo.data.api.Read.loadItem()
		this._assertIsItem(keywordArgs.item);
	},

	getFeatures: function(){
		// summary:
		//		See dojo.data.api.Read.getFeatures()
		return this._features; //Object
	},

	getLabel: function(/* item */ item){
		// summary:
		//		See dojo.data.api.Read.getLabel()
		if(this._labelAttr && this.isItem(item)){
			return this.getValue(item,this._labelAttr); //String
		}
		return undefined; //undefined
	},

	getLabelAttributes: function(/* item */ item){
		// summary:
		//		See dojo.data.api.Read.getLabelAttributes()
		if(this._labelAttr){
			return [this._labelAttr]; //array
		}
		return null; //null
	},

	_fetchItems: function(	/* Object */ keywordArgs,
							/* Function */ findCallback,
							/* Function */ errorCallback){
		// summary:
		//		See dojo.data.util.simpleFetch.fetch()
		//		filter modified to permit complex queries where
		//		logical operators are case insensitive:
		//		, NOT AND OR ( ) ! && ||
		//		Note: "," included for quoted/string legacy queries.
		var self = this;
		var filter = function(requestArgs, arrayOfItems){
			var items = [];
			if(requestArgs.query){
				//Complete copy, we may have to mess with it.
				//Safer than clone, which does a shallow copy, I believe.
				var query = json.fromJson(json.toJson(requestArgs.query));
				//Okay, object form query, we have to check to see if someone mixed query methods (such as using FilteringSelect
				//with a complexQuery). In that case, the params need to be ANDed to the complex query statement.
				//See defect #7980
				if(typeof query == "object" ){
					var count = 0;
					var p;
					for(p in query){
						count++;
					}
					if(count > 1 && query.complexQuery){
						var cq = query.complexQuery;
						var wrapped = false;
						for(p in query){
							if(p !== "complexQuery"){
								//We should wrap this in () so it ANDs with the entire complex query,
								//not just part of it.
								if(!wrapped){
									cq = "( " + cq + " )";
									wrapped = true;
								}
								//Make sure strings are quoted when going into complexQuery merge.
								var v = requestArgs.query[p];
								if(lang.isString(v)){
									v = "'" + v + "'";
								}
								cq += " AND " + p + ":" + v;
								delete query[p];
							}
						}
						query.complexQuery = cq;
					}
				}

				var ignoreCase = requestArgs.queryOptions ? requestArgs.queryOptions.ignoreCase : false;
				//for complex queries only: pattern = query[:|=]"NOT id:23* AND (type:'test*' OR dept:'bob') && !filed:true"
				//logical operators are case insensitive: , NOT AND OR ( ) ! && ||	// "," included for quoted/string legacy queries.
				if(typeof query != "string"){
					query = json.toJson(query);
					query = query.replace(/\\\\/g,"\\"); //counter toJson expansion of backslashes, e.g., foo\\*bar test.
				}
				query = query.replace(/\\"/g,"\""); //ditto, for embedded \" in lieu of " availability.
				var complexQuery = lang.trim(query.replace(/{|}/g,"")); //we can handle these, too.
				var pos2, i;
				if(complexQuery.match(/"? *complexQuery *"?:/)){ //case where widget required a json object, so use complexQuery:'the real query'
					complexQuery = lang.trim(complexQuery.replace(/"?\s*complexQuery\s*"?:/,""));
					var quotes = ["'",'"'];
					var pos1,colon;
					var flag = false;
					for(i = 0; i<quotes.length; i++){
						pos1 = complexQuery.indexOf(quotes[i]);
						pos2 = complexQuery.indexOf(quotes[i],1);
						colon = complexQuery.indexOf(":",1);
						if(pos1 === 0 && pos2 != -1 && colon < pos2){
							flag = true;
							break;
						} //first two sets of quotes don't occur before the first colon.
					}
					if(flag){ //dojo.toJson, and maybe user, adds surrounding quotes, which we need to remove.
						complexQuery = complexQuery.replace(/^\"|^\'|\"$|\'$/g,"");
					}
				} //end query="{complexQuery:'id:1* || dept:Sales'}" parsing (for when widget required json object query).
				var complexQuerySave = complexQuery;
				//valid logical operators.
				var begRegExp = /^,|^NOT |^AND |^OR |^\(|^\)|^!|^&&|^\|\|/i; //trailing space on some tokens on purpose.
				var sQuery = ""; //will be eval'ed for each i-th candidateItem, based on query components.
				var op = "";
				var val = "";
				var pos = -1;
				var err = false;
				var key = "";
				var value = "";
				var tok = "";
				pos2 = -1;
				for(i = 0; i < arrayOfItems.length; ++i){
					var match = true;
					var candidateItem = arrayOfItems[i];
					if(candidateItem === null){
						match = false;
					}else{
						//process entire string for this i-th candidateItem.
						complexQuery = complexQuerySave; //restore query for next candidateItem.
						sQuery = "";
						//work left to right, finding either key:value pair or logical operator at the beginning of the complexQuery string.
						//when found, concatenate to sQuery and remove from complexQuery and loop back.
						while(complexQuery.length > 0 && !err){
							op = complexQuery.match(begRegExp);

							//get/process/append one or two leading logical operators.
							while(op && !err){ //look for leading logical operators.
								complexQuery = lang.trim(complexQuery.replace(op[0],""));
								op = lang.trim(op[0]).toUpperCase();
								//convert some logical operators to their javascript equivalents for later eval.
								op = op == "NOT" ? "!" : op == "AND" || op == "," ? "&&" : op == "OR" ? "||" : op;
								op = " " + op + " ";
								sQuery += op;
								op = complexQuery.match(begRegExp);
							}//end op && !err
							//now get/process/append one key:value pair.
							if(complexQuery.length > 0){
								pos = complexQuery.indexOf(":");
								if(pos == -1){
									err = true;
									break;
								}else{
									key = lang.trim(complexQuery.substring(0,pos).replace(/\"|\'/g,""));
									complexQuery = lang.trim(complexQuery.substring(pos + 1));
									tok = complexQuery.match(/^\'|^\"/); //quoted?
									if(tok){
										tok = tok[0];
										pos = complexQuery.indexOf(tok);
										pos2 = complexQuery.indexOf(tok,pos + 1);
										if(pos2 == -1){
											err = true;
											break;
										}
										value = complexQuery.substring(pos + 1,pos2);
										if(pos2 == complexQuery.length - 1){ //quote is last character
											complexQuery = "";
										}else{
											complexQuery = lang.trim(complexQuery.substring(pos2 + 1));
										}
										sQuery += self._containsValue(candidateItem, key, value, filterUtil.patternToRegExp(value, ignoreCase));
									}
									else{ //not quoted, so a space, comma, or closing parens (or the end) will be the break.
										tok = complexQuery.match(/\s|\)|,/);
										if(tok){
											var pos3 = new Array(tok.length);
											for(var j = 0;j<tok.length;j++){
												pos3[j] = complexQuery.indexOf(tok[j]);
											}
											pos = pos3[0];
											if(pos3.length > 1){
												for(var j=1;j<pos3.length;j++){
													pos = Math.min(pos,pos3[j]);
												}
											}
											value = lang.trim(complexQuery.substring(0,pos));
											complexQuery = lang.trim(complexQuery.substring(pos));
										}else{ //not a space, so must be at the end of the complexQuery.
											value = lang.trim(complexQuery);
											complexQuery = "";
										} //end inner if(tok) else
										sQuery += self._containsValue(candidateItem, key, value, filterUtil.patternToRegExp(value, ignoreCase));
									} //end outer if(tok) else
								} //end found ":"
							} //end if(complexQuery.length > 0)
						} //end while complexQuery.length > 0 && !err, so finished the i-th item.
						match = eval(sQuery);
					} //end else is non-null candidateItem.
					if(match){
						items.push(candidateItem);
					}
				} //end for/next of all items.
				if(err){
					//soft fail.
					items = [];
					console.log("The store's _fetchItems failed, probably due to a syntax error in query.");
				}
				findCallback(items, requestArgs);
			}else{
				// No query...
				// We want a copy to pass back in case the parent wishes to sort the array.
				// We shouldn't allow resort of the internal list, so that multiple callers
				// can get lists and sort without affecting each other. We also need to
				// filter out any null values that have been left as a result of deleteItem()
				// calls in ItemFileWriteStore.
				for(var i = 0; i < arrayOfItems.length; ++i){
					var item = arrayOfItems[i];
					if(item !== null){
						items.push(item);
					}
				}
				findCallback(items, requestArgs);
			} //end if there is a query.
		}; //end filter function

		if(this._loadFinished){
			filter(keywordArgs, this._getItemsArray(keywordArgs.queryOptions));
		}else{
			if(this._jsonFileUrl !== this._ccUrl){
				kernel.deprecated("dojox.data.AndOrReadStore: ",
					"To change the url, set the url property of the store," +
					" not _jsonFileUrl. _jsonFileUrl support will be removed in 2.0");
				this._ccUrl = this._jsonFileUrl;
				this.url = this._jsonFileUrl;
			}else if(this.url !== this._ccUrl){
				this._jsonFileUrl = this.url;
				this._ccUrl = this.url;
			}
			//See if there was any forced reset of data.
			if(this.data != null && this._jsonData == null){
				this._jsonData = this.data;
				this.data = null;
			}
			if(this._jsonFileUrl){
				//If fetches come in before the loading has finished, but while
				//a load is in progress, we have to defer the fetching to be
				//invoked in the callback.
				if(this._loadInProgress){
					this._queuedFetches.push({args: keywordArgs, filter: filter});
				}else{
					this._loadInProgress = true;
					var getArgs = {
						url: self._jsonFileUrl,
						handleAs: "json-comment-optional",
						preventCache: this.urlPreventCache
					};
					var getHandler = xhr.get(getArgs);
					getHandler.addCallback(function(data){
						try{
							self._getItemsFromLoadedData(data);
							self._loadFinished = true;
							self._loadInProgress = false;

							filter(keywordArgs, self._getItemsArray(keywordArgs.queryOptions));
							self._handleQueuedFetches();
						}catch(e){
							self._loadFinished = true;
							self._loadInProgress = false;
							errorCallback(e, keywordArgs);
						}
					});
					getHandler.addErrback(function(error){
						self._loadInProgress = false;
						errorCallback(error, keywordArgs);
					});
					//Wire up the cancel to abort of the request
					//This calls cancel on the deferred if it hasn't been called
					//yet and then will chain to the simple abort of the
					//simpleFetch keywordArgs
					var oldAbort = null;
					if(keywordArgs.abort){
						oldAbort = keywordArgs.abort;
					}
					keywordArgs.abort = function(){
						var df = getHandler;
						if(df && df.fired === -1){
							df.cancel();
							df = null;
						}
						if(oldAbort){
							oldAbort.call(keywordArgs);
						}
					};
				}
			}else if(this._jsonData){
				try{
					this._loadFinished = true;
					this._getItemsFromLoadedData(this._jsonData);
					this._jsonData = null;
					filter(keywordArgs, this._getItemsArray(keywordArgs.queryOptions));
				}catch(e){
					errorCallback(e, keywordArgs);
				}
			}else{
				errorCallback(new Error("dojox.data.AndOrReadStore: No JSON source data was provided as either URL or a nested Javascript object."), keywordArgs);
			}
		} //end deferred fetching.
	}, //end _fetchItems

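	// example:
	//		A usage sketch of the kinds of queries fetch() accepts here, assuming 'store'
	//		was constructed as above (the attribute names are illustrative):
	//	|	// plain string form
	//	|	store.fetch({query: "id:2* && NOT dept:S*", onComplete: function(items){ /* ... */ }});
	//	|	// legacy/widget object form; extra fields are ANDed onto the complex query
	//	|	store.fetch({
	//	|		query: {complexQuery: "id:2* || dept:'Sales Department'", type: "employee"},
	//	|		queryOptions: {ignoreCase: true},
	//	|		onComplete: function(items, request){ /* ... */ }
	//	|	});
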
	_handleQueuedFetches: function(){
		// summary:
		//		Internal function to execute delayed requests in the store.
		//Execute any deferred fetches now.
		if(this._queuedFetches.length > 0){
			for(var i = 0; i < this._queuedFetches.length; i++){
				var fData = this._queuedFetches[i];
				var delayedQuery = fData.args;
				var delayedFilter = fData.filter;
				if(delayedFilter){
					delayedFilter(delayedQuery, this._getItemsArray(delayedQuery.queryOptions));
				}else{
					this.fetchItemByIdentity(delayedQuery);
				}
			}
			this._queuedFetches = [];
		}
	},

	_getItemsArray: function(/*object?*/queryOptions){
		// summary:
		//		Internal function to determine which list of items to search over.
		// queryOptions: The query options parameter, if any.
		if(queryOptions && queryOptions.deep){
			return this._arrayOfAllItems;
		}
		return this._arrayOfTopLevelItems;
	},

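	// example:
	//		A sketch of requesting a deep search from fetch(), so nested child items are
	//		considered as well as root items (the attribute name is illustrative):
	//	|	store.fetch({
	//	|		query: "name:F*",
	//	|		queryOptions: {deep: true},
	//	|		onComplete: function(items){ /* includes child items */ }
	//	|	});
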
	close: function(/*dojo.data.api.Request || keywordArgs || null */ request){
		// summary:
		//		See dojo.data.api.Read.close()
		if(this.clearOnClose &&
			this._loadFinished &&
			!this._loadInProgress){
			//Reset all internals back to default state. This will force a reload
			//on next fetch. This also checks that the data or url param was set
			//so that the store knows it can get data. Without one of those being set,
			//the next fetch will trigger an error.

			if(((this._jsonFileUrl == "" || this._jsonFileUrl == null) &&
				(this.url == "" || this.url == null)
				) && this.data == null){
				console.debug("dojox.data.AndOrReadStore: WARNING! Data reload " +
					" information has not been provided." +
					" Please set 'url' or 'data' to the appropriate value before" +
					" the next fetch");
			}
			this._arrayOfAllItems = [];
			this._arrayOfTopLevelItems = [];
			this._loadFinished = false;
			this._itemsByIdentity = null;
			this._loadInProgress = false;
			this._queuedFetches = [];
		}
	},

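	// example:
	//		A sketch of forcing a reload via close(), assuming the store was created with
	//		clearOnClose (the file name is an illustrative assumption):
	//	|	var store = new AndOrReadStore({url: "muppets.json", clearOnClose: true});
	//	|	// ...fetch, use items...
	//	|	store.close(); // existing item handles become invalid; the next fetch() reloads from 'url'
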
	_getItemsFromLoadedData: function(/* Object */ dataObject){
		// summary:
		//		Function to parse the loaded data into item format and build the internal items array.
		// description:
		//		Function to parse the loaded data into item format and build the internal items array.
		//
		// dataObject:
		//		The JS data object containing the raw data to convert into item format.
		//
		// returns: array
		//		Array of items in store item format.

		// First, we define a couple little utility functions...

		var self = this;
		function valueIsAnItem(/* anything */ aValue){
			// summary:
			//		Given any sort of value that could be in the raw json data,
			//		return true if we should interpret the value as being an
			//		item itself, rather than a literal value or a reference.
			// example:
			//	|	false == valueIsAnItem("Kermit");
			//	|	false == valueIsAnItem(42);
			//	|	false == valueIsAnItem(new Date());
			//	|	false == valueIsAnItem({_type:'Date', _value:'May 14, 1802'});
			//	|	false == valueIsAnItem({_reference:'Kermit'});
			//	|	true == valueIsAnItem({name:'Kermit', color:'green'});
			//	|	true == valueIsAnItem({iggy:'pop'});
			//	|	true == valueIsAnItem({foo:42});
			var isItem = (
				(aValue !== null) &&
				(typeof aValue === "object") &&
				(!lang.isArray(aValue)) &&
				(!lang.isFunction(aValue)) &&
				(aValue.constructor == Object) &&
				(typeof aValue._reference === "undefined") &&
				(typeof aValue._type === "undefined") &&
				(typeof aValue._value === "undefined") &&
				self.hierarchical
			);
			return isItem;
		}

		function addItemAndSubItemsToArrayOfAllItems(/* Item */ anItem){
			self._arrayOfAllItems.push(anItem);
			for(var attribute in anItem){
				var valueForAttribute = anItem[attribute];
				if(valueForAttribute){
					if(lang.isArray(valueForAttribute)){
						var valueArray = valueForAttribute;
						for(var k = 0; k < valueArray.length; ++k){
							var singleValue = valueArray[k];
							if(valueIsAnItem(singleValue)){
								addItemAndSubItemsToArrayOfAllItems(singleValue);
							}
						}
					}else{
						if(valueIsAnItem(valueForAttribute)){
							addItemAndSubItemsToArrayOfAllItems(valueForAttribute);
						}
					}
				}
			}
		}

		this._labelAttr = dataObject.label;

		// We need to do some transformations to convert the data structure
		// that we read from the file into a format that will be convenient
		// to work with in memory.

		// Step 1: Walk through the object hierarchy and build a list of all items
		var i;
		var item;
		this._arrayOfAllItems = [];
		this._arrayOfTopLevelItems = dataObject.items;

		for(i = 0; i < this._arrayOfTopLevelItems.length; ++i){
			item = this._arrayOfTopLevelItems[i];
			addItemAndSubItemsToArrayOfAllItems(item);
			item[this._rootItemPropName]=true;
		}

		// Step 2: Walk through all the attribute values of all the items,
		// and replace single values with arrays. For example, we change this:
		//		{ name:'Miss Piggy', pets:'Foo-Foo'}
		// into this:
		//		{ name:['Miss Piggy'], pets:['Foo-Foo']}
		//
		// We also store the attribute names so we can validate our store
		// reference and item id special properties for the O(1) isItem
		var allAttributeNames = {};
		var key;

		for(i = 0; i < this._arrayOfAllItems.length; ++i){
			item = this._arrayOfAllItems[i];
			for(key in item){
				if(key !== this._rootItemPropName){
					var value = item[key];
					if(value !== null){
						if(!lang.isArray(value)){
							item[key] = [value];
						}
					}else{
						item[key] = [null];
					}
				}
				allAttributeNames[key]=key;
			}
		}

		// Step 3: Build unique property names to use for the _storeRefPropName and _itemNumPropName
		// This should go really fast, it will generally never even run the loop.
		while(allAttributeNames[this._storeRefPropName]){
			this._storeRefPropName += "_";
		}
		while(allAttributeNames[this._itemNumPropName]){
			this._itemNumPropName += "_";
		}
		while(allAttributeNames[this._reverseRefMap]){
			this._reverseRefMap += "_";
		}

		// Step 4: Some data files specify an optional 'identifier', which is
		// the name of an attribute that holds the identity of each item.
		// If this data file specified an identifier attribute, then build a
		// hash table of items keyed by the identity of the items.
		var arrayOfValues;

		var identifier = dataObject.identifier;
		if(identifier){
			this._itemsByIdentity = {};
			this._features['dojo.data.api.Identity'] = identifier;
			for(i = 0; i < this._arrayOfAllItems.length; ++i){
				item = this._arrayOfAllItems[i];
				arrayOfValues = item[identifier];
				var identity = arrayOfValues[0];
				if(!this._itemsByIdentity[identity]){
					this._itemsByIdentity[identity] = item;
				}else{
					if(this._jsonFileUrl){
						throw new Error("dojox.data.AndOrReadStore: The json data as specified by: [" + this._jsonFileUrl + "] is malformed. Items within the list have identifier: [" + identifier + "]. Value collided: [" + identity + "]");
					}else if(this._jsonData){
						throw new Error("dojox.data.AndOrReadStore: The json data provided by the creation arguments is malformed. Items within the list have identifier: [" + identifier + "]. Value collided: [" + identity + "]");
					}
				}
			}
		}else{
			this._features['dojo.data.api.Identity'] = Number;
		}

		// Step 5: Walk through all the items, and set each item's properties
		// for _storeRefPropName and _itemNumPropName, so that store.isItem() will return true.
		for(i = 0; i < this._arrayOfAllItems.length; ++i){
			item = this._arrayOfAllItems[i];
			item[this._storeRefPropName] = this;
			item[this._itemNumPropName] = i;
		}

		// Step 6: We walk through all the attribute values of all the items,
		// looking for type/value literals and item-references.
		//
		// We replace item-references with pointers to items. For example, we change:
		//		{ name:['Kermit'], friends:[{_reference:{name:'Miss Piggy'}}] }
		// into this:
		//		{ name:['Kermit'], friends:[miss_piggy] }
		// (where miss_piggy is the object representing the 'Miss Piggy' item).
		//
		// We replace type/value pairs with typed-literals. For example, we change:
		//		{ name:['Nelson Mandela'], born:[{_type:'Date', _value:'July 18, 1918'}] }
		// into this:
		//		{ name:['Nelson Mandela'], born:[(new Date('July 18, 1918'))] }
		//
		// We also generate the associated map for all items for the O(1) isItem function.
		for(i = 0; i < this._arrayOfAllItems.length; ++i){
			item = this._arrayOfAllItems[i]; // example: { name:['Kermit'], friends:[{_reference:{name:'Miss Piggy'}}] }
			for(key in item){
				arrayOfValues = item[key]; // example: [{_reference:{name:'Miss Piggy'}}]
				for(var j = 0; j < arrayOfValues.length; ++j){
					value = arrayOfValues[j]; // example: {_reference:{name:'Miss Piggy'}}
					if(value !== null && typeof value == "object"){
						if(("_type" in value) && ("_value" in value)){
							var type = value._type; // examples: 'Date', 'Color', or 'ComplexNumber'
							var mappingObj = this._datatypeMap[type]; // examples: Date, dojo.Color, foo.math.ComplexNumber, {type: dojo.Color, deserialize(value){ return new dojo.Color(value)}}
							if(!mappingObj){
								throw new Error("dojox.data.AndOrReadStore: in the typeMap constructor arg, no object class was specified for the datatype '" + type + "'");
							}else if(lang.isFunction(mappingObj)){
								arrayOfValues[j] = new mappingObj(value._value);
							}else if(lang.isFunction(mappingObj.deserialize)){
								arrayOfValues[j] = mappingObj.deserialize(value._value);
							}else{
								throw new Error("dojox.data.AndOrReadStore: Value provided in typeMap was neither a constructor, nor an object with a deserialize function");
							}
						}
						if(value._reference){
							var referenceDescription = value._reference; // example: {name:'Miss Piggy'}
							if(!lang.isObject(referenceDescription)){
								// example: 'Miss Piggy'
								// from an item like: { name:['Kermit'], friends:[{_reference:'Miss Piggy'}]}
								arrayOfValues[j] = this._getItemByIdentity(referenceDescription);
							}else{
								// example: {name:'Miss Piggy'}
								// from an item like: { name:['Kermit'], friends:[{_reference:{name:'Miss Piggy'}}] }
								for(var k = 0; k < this._arrayOfAllItems.length; ++k){
									var candidateItem = this._arrayOfAllItems[k];
									var found = true;
									for(var refKey in referenceDescription){
										if(candidateItem[refKey] != referenceDescription[refKey]){
											found = false;
										}
									}
									if(found){
										arrayOfValues[j] = candidateItem;
									}
								}
							}
							if(this.referenceIntegrity){
								var refItem = arrayOfValues[j];
								if(this.isItem(refItem)){
									this._addReferenceToMap(refItem, item, key);
								}
							}
						}else if(this.isItem(value)){
							//It's a child item (not one referenced through _reference).
							//We need to treat this as a referenced item, so it can be cleaned up
							//in a write store easily.
							if(this.referenceIntegrity){
								this._addReferenceToMap(value, item, key);
							}
						}
					}
				}
			}
		}
	},

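	// example:
	//		A sketch of how _type/_value literals in the data are deserialized through
	//		typeMap (the Temperature type and the data here are illustrative assumptions):
	//	|	function Temperature(celsius){ this.celsius = Number(celsius); }
	//	|	var store = new AndOrReadStore({
	//	|		data: {items: [{name: "reading1", temp: {_type: "Temperature", _value: "21.5"}}]},
	//	|		typeMap: {"Temperature": Temperature} // constructor form; an object with a 'deserialize' function also works
	//	|	});
	//	|	// Dates already have a default mapping via dojo/date/stamp ISO parsing.
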
	_addReferenceToMap: function(/*item*/ refItem, /*item*/ parentItem, /*string*/ attribute){
		// summary:
		//		Method to add a reference map entry for an item and attribute.
		// description:
		//		Method to add a reference map entry for an item and attribute.
		// refItem:
		//		The item that is referenced.
		// parentItem:
		//		The item that holds the new reference to refItem.
		// attribute:
		//		The attribute on parentItem that contains the new reference.

		//Stub function, does nothing. Real processing is in ItemFileWriteStore.
	},

	getIdentity: function(/* item */ item){
		// summary:
		//		See dojo.data.api.Identity.getIdentity()
		var identifier = this._features['dojo.data.api.Identity'];
		if(identifier === Number){
			return item[this._itemNumPropName]; // Number
		}else{
			var arrayOfValues = item[identifier];
			if(arrayOfValues){
				return arrayOfValues[0]; // Object || String
			}
		}
		return null; // null
	},

	fetchItemByIdentity: function(/* Object */ keywordArgs){
		// summary:
		//		See dojo.data.api.Identity.fetchItemByIdentity()

		// Hasn't loaded yet, we have to trigger the load.
		if(!this._loadFinished){
			var self = this;
			if(this._jsonFileUrl !== this._ccUrl){
				kernel.deprecated("dojox.data.AndOrReadStore: ",
					"To change the url, set the url property of the store," +
					" not _jsonFileUrl. _jsonFileUrl support will be removed in 2.0");
				this._ccUrl = this._jsonFileUrl;
				this.url = this._jsonFileUrl;
			}else if(this.url !== this._ccUrl){
				this._jsonFileUrl = this.url;
				this._ccUrl = this.url;
			}
			//See if there was any forced reset of data.
			if(this.data != null && this._jsonData == null){
				this._jsonData = this.data;
				this.data = null;
			}
			if(this._jsonFileUrl){

				if(this._loadInProgress){
					this._queuedFetches.push({args: keywordArgs});
				}else{
					this._loadInProgress = true;
					var getArgs = {
						url: self._jsonFileUrl,
						handleAs: "json-comment-optional",
						preventCache: this.urlPreventCache
					};
					var getHandler = xhr.get(getArgs);
					getHandler.addCallback(function(data){
						var scope = keywordArgs.scope?keywordArgs.scope:winUtil.global;
						try{
							self._getItemsFromLoadedData(data);
							self._loadFinished = true;
							self._loadInProgress = false;
							var item = self._getItemByIdentity(keywordArgs.identity);
							if(keywordArgs.onItem){
								keywordArgs.onItem.call(scope, item);
							}
							self._handleQueuedFetches();
						}catch(error){
							self._loadInProgress = false;
							if(keywordArgs.onError){
								keywordArgs.onError.call(scope, error);
							}
						}
					});
					getHandler.addErrback(function(error){
						self._loadInProgress = false;
						if(keywordArgs.onError){
							var scope = keywordArgs.scope?keywordArgs.scope:winUtil.global;
							keywordArgs.onError.call(scope, error);
						}
					});
				}
			}else if(this._jsonData){
				// Passed in data, no need to xhr.
				self._getItemsFromLoadedData(self._jsonData);
				self._jsonData = null;
				self._loadFinished = true;
				var item = self._getItemByIdentity(keywordArgs.identity);
				if(keywordArgs.onItem){
					var scope = keywordArgs.scope?keywordArgs.scope:winUtil.global;
					keywordArgs.onItem.call(scope, item);
				}
			}
		}else{
			// Already loaded. We can just look it up and call back.
			var item = this._getItemByIdentity(keywordArgs.identity);
			if(keywordArgs.onItem){
				var scope = keywordArgs.scope?keywordArgs.scope:winUtil.global;
				keywordArgs.onItem.call(scope, item);
			}
		}
	},

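	// example:
	//		A usage sketch, assuming the data declared an 'identifier' attribute
	//		(the identity value here is illustrative):
	//	|	store.fetchItemByIdentity({
	//	|		identity: "Kermit",
	//	|		onItem: function(item){ /* item is null if not found */ },
	//	|		onError: function(err){ console.error(err); }
	//	|	});
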
	_getItemByIdentity: function(/* Object */ identity){
		// summary:
		//		Internal function to look an item up by its identity map.
		var item = null;
		if(this._itemsByIdentity){
			item = this._itemsByIdentity[identity];
		}else{
			item = this._arrayOfAllItems[identity];
		}
		if(item === undefined){
			item = null;
		}
		return item; // Object
	},

	getIdentityAttributes: function(/* item */ item){
		// summary:
		//		See dojo.data.api.Identity.getIdentityAttributes()

		var identifier = this._features['dojo.data.api.Identity'];
		if(identifier === Number){
			// If (identifier === Number) it means getIdentity() just returns
			// an integer item-number for each item. The dojo.data.api.Identity
			// spec says we need to return null if the identity is not composed
			// of attributes
			return null; // null
		}else{
			return [identifier]; // Array
		}
	},

	_forceLoad: function(){
		// summary:
		//		Internal function to force a load of the store if it hasn't occurred yet. This is required
		//		for specific functions to work properly.
		var self = this;
		if(this._jsonFileUrl !== this._ccUrl){
			kernel.deprecated("dojox.data.AndOrReadStore: ",
				"To change the url, set the url property of the store," +
				" not _jsonFileUrl. _jsonFileUrl support will be removed in 2.0");
			this._ccUrl = this._jsonFileUrl;
			this.url = this._jsonFileUrl;
		}else if(this.url !== this._ccUrl){
			this._jsonFileUrl = this.url;
			this._ccUrl = this.url;
		}
		//See if there was any forced reset of data.
		if(this.data != null && this._jsonData == null){
			this._jsonData = this.data;
			this.data = null;
		}
		if(this._jsonFileUrl){
			var getArgs = {
				url: self._jsonFileUrl,
				handleAs: "json-comment-optional",
				preventCache: this.urlPreventCache,
				sync: true
			};
			var getHandler = xhr.get(getArgs);
			getHandler.addCallback(function(data){
				try{
					//Check to be sure there wasn't another load going on concurrently
					//So we don't clobber data that comes in on it. If there is a load going on
					//then do not save this data. It will potentially clobber current data.
					//We mainly wanted to sync/wait here.
					//TODO: Revisit the loading scheme of this store to improve multi-initial
					//request handling.
					if(self._loadInProgress !== true && !self._loadFinished){
						self._getItemsFromLoadedData(data);
						self._loadFinished = true;
					}else if(self._loadInProgress){
						//Okay, we hit an error state we can't recover from. A forced load occurred
						//while an async load was occurring. Since we cannot block at this point, the best
						//that can be managed is to throw an error.
						throw new Error("dojox.data.AndOrReadStore: Unable to perform a synchronous load, an async load is in progress.");
					}
				}catch(e){
					console.log(e);
					throw e;
				}
			});
			getHandler.addErrback(function(error){
				throw error;
			});
		}else if(this._jsonData){
			self._getItemsFromLoadedData(self._jsonData);
			self._jsonData = null;
			self._loadFinished = true;
		}
	}
});
//Mix in the simple fetch implementation to this class.
lang.extend(AndOrReadStore, simpleFetch);

return AndOrReadStore;
});