Browse Source

make_model: support wheres and be aware of ambient changes

master
Bing Sun 5 years ago
parent
commit
f653cd9960
Signed by: sunb  GPG Key ID: F7795F8C590626AB
1 changed file with 69 additions and 74 deletions
  1. +69 -74 src/util/make_model.js

+69 -74 src/util/make_model.js  View File

@@ -31,14 +31,20 @@ export default (options={}) => {
endpoint_type: options.endpoint.startsWith('/rpc') ? 'function' : 'relation',
// query part
selects: options.selects,
wheres: options.wheres,
order: options.order
}

// private model cache & meta-info
let _cache = {
// upstream (data) cache
data: null,
count: null,
upstream_limit: null
upstream_limit: null,

// local cache
ambient: null,
last_ambient: null,
}

// some random variables to make things work
@@ -46,15 +52,35 @@ export default (options={}) => {
let _promise = null

// construct the model
return {
let _model = {
// reflection methods
configs() { return _configs },
cache() { return _cache },
data(offset=0, limit=Infinity) { return _cache.data && _cache.data.slice(offset, offset+limit) || [] },
ambient_changed() {
return _cache.ambient != _cache.last_ambient
},
reset() {
let ambient_queries = [
...(_configs.selects ? _configs.selects : []),
...(_configs.wheres ? _configs.wheres.map(query => ({label: query.label, op: query.op, value: typeof query.value == 'function' ? query.value() : query.value})) : []),
...(_configs.order ? _configs.order : [])
]

_cache.last_ambient = _cache.ambient
_cache.ambient =JSON.stringify(ambient_queries)

if (this.ambient_changed()) {
_cache.data = []
_cache.count = null
_cache.upstream_limit = null
_xhr = null
//this.fully_loaded = false
}
},

// main methods
select(offset=0, limit=Infinity) {
// initialize data (singular or plural)
_cache.data = _cache.data || []

// normalize limit
@@ -80,13 +106,14 @@ export default (options={}) => {
config: xhr => _xhr = xhr,
queries: [
// transform model state to postgest queries
//...ambient_queries,
// selects
...(_configs.selects ? [{
label: 'select',
value: _configs.selects.map(select => select.alias ? select.alias + ':' + select.label : select.label).join(',')
}] : []),

...(_configs.wheres ? _configs.wheres : []),

// order
...(_configs.order ? [{
label: 'order',
@@ -102,89 +129,57 @@ export default (options={}) => {
// limit/offset
offset == 0 ? undefined : {label: 'offset', value: offset},
limit == Infinity ? undefined : {label: 'limit', value: limit}
]
}).then(response => {
// gather begin/end/count
let [_range, _count] = _xhr.getResponseHeader('content-range').split('/')
let [_begin, _end] = _range.split('-').map(v => ~~v)

// update count if presented
if (_count != '*') _cache.count = _count

// see if an upstream limit is exposed
if (_end - _begin + 1 < limit) _cache.upstream_limit = _end - _begin + 1

// fill the data cache
response.forEach((data, i) => {
// process values
_configs.selects && _configs.selects
.filter(select => select.processor)
.forEach(select => data[select.alias || select.label] = select.processor(data[select.alias || select.label]))

// save the data
_cache.data[_begin + i] = data
})

// assert offset/limit and returned range
if (offset != _begin || _end - _begin + 1 > limit)
throw 'The request and response data range mismatches!'
if (_end - _begin + 1 < limit)
console.warn('The response range is narrower than requested, probably due to an upstream hard limit.')

// clean model state
_promise = null

// return data
return _cache.data.slice(_begin, _end + 1)
]
}).then(response => {
// gather begin/end/count
let [_range, _count] = _xhr.getResponseHeader('content-range').split('/')
let [_begin, _end] = _range.split('-').map(v => ~~v)

// update count if presented
if (_count != '*') _cache.count = _count

// see if an upstream limit is exposed
if (_end - _begin + 1 < limit) _cache.upstream_limit = _end - _begin + 1

// fill the data cache
response.forEach((data, i) => {
// process values
_configs.selects && _configs.selects
.filter(select => select.processor)
.forEach(select => data[select.alias || select.label] = select.processor(data[select.alias || select.label]))

// save the data
_cache.data[_begin + i] = data
})

// assert offset/limit and returned range
if (offset != _begin || _end - _begin + 1 > limit)
throw 'The request and response data range mismatches!'
if (_end - _begin + 1 < limit)
console.warn('The response range is narrower than requested, probably due to an upstream hard limit.')

// clean model state
_promise = null
this.reset()

// return data
return _cache.data.slice(_begin, _end + 1)
})

return _promise
}
}

_model.reset()
return _model
}

export const default1 = (args) => {
// private model configs
let _configs = {
queries: args.queries || [],
}
return {
// model & model state
fully_loaded: false,

// ambient
ambient: null,
last_ambient: null,
ambient_changed() {
return this.ambient != this.last_ambient
},
reset() {
// assemble queries for api.request
let ambient_queries = [
...(_configs.selects ? [{label: 'select', value: Array.from(_configs.selects, ([label, config]) => config.alias ? config.alias + ':' + label : label).join(',')}] : []),
...(_configs.queries ? _configs.queries.map(query => ({label: query.label, op: query.op, value: typeof query.value == 'function' ? query.value() : query.value})) : []),
...(_configs.order ? [{label: 'order', value: _configs.order.map(o => [o.label, o.direction, o.nulls ? 'nulls'+o.nulls : ''].filter(a => a).join('.')).join(',')}] : [])
]

this.last_ambient = this.ambient
this.ambient =JSON.stringify(ambient_queries)

if (this.ambient_changed()) {
this.list = []
this.count = null
this.fully_loaded = false
this.xhr = null
}

return ambient_queries
},

// load data
load(args) {
let ambient_queries = this.reset()
// now the hard work
},

// load full model if privileged
load_full() {
this.reset()


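For orientation, here is a minimal usage sketch (not part of the commit) of how a model built by this factory might exercise the new wheres option and the ambient-change reset. The endpoint name, column names, and the exact shape of the where/order entries are assumptions for illustration; only the default export, the {selects, wheres, order} options, and the reset()/select()/ambient_changed() behaviour come from the diff above. Whether the request transport evaluates function-valued wheres when it issues the query is outside this diff.

import make_model from 'src/util/make_model'   // path as in this repo; resolution depends on the build setup

let status = 'open'

const tickets = make_model({
  endpoint: '/tickets',                                    // hypothetical relation
  selects: [{label: 'id'}, {label: 'title'}],
  // a function-valued where is re-evaluated by reset(), so changing `status`
  // later changes the serialized ambient and invalidates the local cache
  wheres: [{label: 'status', op: 'eq', value: () => status}],
  order: [{label: 'id', direction: 'asc'}]
})

tickets.select(0, 25).then(() => {
  status = 'closed'            // an ambient input changes
  tickets.reset()              // ambient_changed() becomes true: data, count,
                               // upstream_limit and the cached xhr are cleared
  return tickets.select(0, 25) // the next select starts from an empty cache
})

In the commit itself, reset() also runs once when the model is constructed (_model.reset() before it is returned) and again at the end of each successful select, so the ambient snapshot stays current without the caller having to manage it explicitly.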