Merge branch 'ps-chunked-buffer'
commit 97304ccd74
@@ -1,4 +1,5 @@
 require 'travis/model'
+require 'travis/chunk_buffer'

 @Travis.Artifact = Em.Object.extend
   version: 1 # used to refresh log on requeue
@@ -8,82 +9,88 @@ require 'travis/model'
   init: ->
     @_super.apply this, arguments

-    @addObserver 'job.id', @fetchBody
-    @fetchBody()
+    @addObserver 'job.id', @fetch
+    @fetch()

-    @set 'queue', Ember.A([])
-    @set 'parts', Ember.ArrayProxy.create(content: [])
-
-    @addObserver 'body', @fetchWorker
-    @fetchWorker()
+    @set 'parts', Travis.ChunkBuffer.create(content: [])

   id: (->
     @get('job.id')
   ).property('job.id')

+  willDestroy: ->
+    @get('parts').destroy()
+
   clear: ->
     @set('body', '')
     @incrementProperty('version')
+    @get('parts').destroy()
+    @set 'parts', Travis.ChunkBuffer.create(content: [])

-  fetchBody: ->
+  fetch: ->
     if jobId = @get('job.id')
-      @removeObserver 'job.id', @fetchBody
-
-      self = this
-      Travis.ajax.ajax "/jobs/#{jobId}/log.txt?cors_hax=true", 'GET',
-        dataType: 'text'
-        contentType: 'text/plain'
-        success: (data, textStatus, xhr) ->
-          if xhr.status == 204
-            logUrl = xhr.getResponseHeader('X-Log-Location')
-
-            # For some reason not all browsers can fetch this header
-            unless logUrl
-              logUrl = self.s3Url("/jobs/#{jobId}/log.txt")
-
-            $.ajax
-              url: logUrl
-              type: 'GET'
-              success: (data) ->
-                self.fetchedBody(data)
-          else
-            self.fetchedBody(data)
-
-  s3Url: (path) ->
-    endpoint = Travis.config.api_endpoint
-    staging = if endpoint.match(/-staging/) then '-staging' else ''
-    host = Travis.config.api_endpoint.replace(/^https?:\/\//, '').split('.').slice(-2).join('.')
-    "https://s3.amazonaws.com/archive#{staging}.#{host}#{path}"
-
-  fetchedBody: (body) ->
-    @set 'body', body
-    @set 'isLoaded', true
-
-  append: (body) ->
-    if @get('isInitialized')
-      @get('parts').pushObject body
-      @set('body', @get('body') + body)
-    else
-      @get('queue').pushObject(body)
-
-  recordDidLoad: (->
-    if @get('isLoaded')
-      if (body = @get 'body') && @get('parts.length') == 0
-        @get('parts').pushObject body
-
-      @set 'isInitialized', true
-
-      queue = @get('queue')
-      if queue.get('length') > 0
-        @append queue.toArray().join('')
-  ).observes('isLoaded')
-
-  fetchWorker: ->
-    if !@get('workerName') && (body = @get('body'))
-      line = body.split("\n")[0]
+      @removeObserver 'job.id', @fetch
+
+      handlers =
+        json: (json) => @loadParts(json['log']['parts'])
+        text: (text) => @loadText(text)
+
+      Travis.Artifact.Request.create(id: id, handlers: handlers).run() if id = @get('job.id')
+
+  append: (part) ->
+    @fetchWorker Ember.get(part, 'content')
+    @get('parts').pushObject(part)
+
+  loadParts: (parts) ->
+    console.log 'artifact model: load parts'
+    @append(part) for part in parts
+    @set('isLoaded', true)
+
+  loadText: (text) ->
+    console.log 'artifact model: load text'
+    number = -1
+    @append(number: 1, content: text)
+    @set('isLoaded', true)
+
+  fetchWorker: (string) ->
+    if !@get('workerName')
+      line = string.split("\n")[0]
       if line && (match = line.match /Using worker: (.*)/)
         if worker = match[1]
           worker = worker.trim().split(':')[0]
           @set('workerName', worker)
-          @removeObserver 'body', @fetchWorker
+
+Travis.Artifact.Request = Em.Object.extend
+  HEADERS:
+    accept: 'application/vnd.travis-ci.2+json; chunked=true; version=2, text/plain; version=2'
+
+  run: ->
+    Travis.ajax.ajax "/jobs/#{@id}/log?cors_hax=true", 'GET',
+      dataType: 'text'
+      headers: @HEADERS
+      success: (body, status, xhr) => @handle(body, status, xhr)
+
+  handle: (body, status, xhr) ->
+    if xhr.status == 204
+      $.ajax(url: @redirectTo(xhr), type: 'GET', success: @handlers.text)
+    else if @isJson(xhr, body)
+      @handlers.json(JSON.parse(body))
+    else
+      @handlers.text(body)
+
+  redirectTo: (xhr) ->
+    # Firefox can't see the Location header on the xhr response due to the wrong
+    # status code 204. Should be some redirect code but that doesn't work with CORS.
+    xhr.getResponseHeader('Location') || @s3Url()
+
+  s3Url: ->
+    endpoint = Travis.config.api_endpoint
+    staging = if endpoint.match(/-staging/) then '-staging' else ''
+    host = endpoint.replace(/^https?:\/\//, '').split('.').slice(-2).join('.')
+    "https://s3.amazonaws.com/archive#{staging}.#{host}#{path}/jobs/#{@id}/log.txt"
+
+  isJson: (xhr, body) ->
+    # Firefox can't see the Content-Type header on the xhr response due to the wrong
+    # status code 204. Should be some redirect code but that doesn't work with CORS.
+    type = xhr.getResponseHeader('Content-Type') || ''
+    type.indexOf('json') > -1 || body.slice(0, 8) == '{"log":{'
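Note on the hunk above: Travis.Artifact now delegates fetching to Travis.Artifact.Request, which asks the API for a chunked log (the Accept header requests application/vnd.travis-ci.2+json; chunked=true) and dispatches to loadParts when the response is JSON or to loadText when it is plain text. A minimal sketch of the payload the JSON branch expects; only log.parts with number/content pairs is implied by the code, the concrete values below are made up.

    # Illustrative chunked-log payload; everything except the `log.parts`
    # shape with `number`/`content` fields is an assumption, not in this diff.
    payload =
      log:
        parts: [
          { number: 1, content: "Using worker: worker-linux-1:builds\n" }
          { number: 2, content: "$ bundle install\n" }
        ]

    # The handlers wired up in Artifact#fetch would then behave roughly like:
    handlers =
      json: (json) -> console.log 'got', json['log']['parts'].length, 'parts'
      text: (text) -> console.log 'got plain text log of', text.length, 'characters'

    handlers.json(payload)   # prints: got 2 parts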
@@ -97,7 +97,7 @@ Travis.Store = DS.Store.extend
     if event == 'job:log'
       if job = @find(Travis.Job, data['job']['id'])
-        job.appendLog(data['job']['_log'])
+        job.appendLog(number: data['job']['number'], content: data['job']['_log'])
     else if data[type.singularName()]
       @_loadOne(this, type, data)
     else if data[type.pluralName()]

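For context on the store change above: the job:log event now carries the ordinal of the log part alongside the chunk itself, so appendLog receives a number/content pair instead of a bare string. An illustrative event payload follows; the field names match the handler above, the values are invented.

    # Hypothetical `job:log` payload (values made up for illustration).
    data =
      job:
        id: 42
        number: 7                 # position of this chunk in the log
        _log: "$ rake spec\n"     # the chunk content

    # The store handler above then forwards it as:
    #   job.appendLog(number: data['job']['number'], content: data['job']['_log'])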
@@ -104,7 +104,7 @@
     job = @get('job')
     job.subscribe() if job && !job.get('isFinished')
     null
-  ).property('job', 'job.state')
+  ).property('job', 'job.isFinished')

   logUrl: (->
     repo = @get('job.repo')
@@ -133,7 +133,7 @@
     @_super.apply this, arguments

     Ember.run.next this, ->
-      if @get 'log.isInitialized'
+      if @get 'log'
         @logDidChange()

   willDestroy: ->
@@ -148,9 +148,9 @@
   ).observes('log.version')

   logDidChange: (->
-    if @get('log.isInitialized') && @state == 'inDOM'
+    if @get('log') && @state == 'inDOM'
       @attachLogObservers()
-  ).observes('log', 'log.isInitialized')
+  ).observes('log')

   attachLogObservers: ->
     return if @get('logPartsObserversAttached') == Ember.guidFor(@get('log'))
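The last hunk above cuts off after the guard line of attachLogObservers; its body is not part of this diff. For orientation, observing an Ember array generally looks like the sketch below, using the same addArrayObserver API the new spec file exercises; every name other than addArrayObserver and Ember.guidFor is an assumption, not the actual view code.

    # Generic sketch of attaching an array observer to the log parts; not the
    # real body of attachLogObservers, which this diff does not show.
    attachPartsObserver = (view, log) ->
      log.get('parts').addArrayObserver view,
        willChange: 'partsWillChange'
        didChange: 'partsDidChange'
      view.set('logPartsObserversAttached', Ember.guidFor(log))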
assets/scripts/lib/travis/chunk_buffer.coffee (new file, 92 lines)
@@ -0,0 +1,92 @@
get = Ember.get

Travis.ChunkBuffer = Em.ArrayProxy.extend
  timeout: 5000
  checkTimeoutFrequency: 1000
  start: 1
  next: 1

  init: ->
    @_super.apply this, arguments

    @lastInsert = 0

    @set('next', @get('start'))

    @checkTimeout()

    if @get('content.length')
      @get('queue.content').pushObjects @get('content').toArray()

  arrangedContent: (->
    []
  ).property('content')

  addObject: (obj) ->
    @get('content').pushObject(obj)

  removeObject: (obj) ->
    @get('content').removeObject(obj)

  replaceContent: (idx, amt, objects) ->
    @get('content').replace(idx, amt, objects)

  queue: (->
    Em.ArrayProxy.create(Em.SortableMixin,
      content: []
      sortProperties: ['number']
      sortAscending: true
    )
  ).property()

  contentArrayDidChange: (array, index, removedCount, addedCount) ->
    @_super.apply this, arguments

    if addedCount
      queue = @get('queue.content')
      addedObjects = array.slice(index, index + addedCount)
      console.log 'Added log parts with numbers:', addedObjects.map( (element) -> get(element, 'number') )+'', 'current', @get('next')
      queue.pushObjects addedObjects
      @check()
      @inserted()

  check: ->
    queue = @get('queue')
    next = @get('next')

    arrangedContent = @get('arrangedContent')
    toPush = []

    while queue.get('firstObject.number') <= next
      element = queue.shiftObject()
      if get(element, 'number') == next
        toPush.pushObject get(element, 'content')
        next += 1

    if toPush.length
      arrangedContent.pushObjects toPush

    @set('next', next)

  inserted: ->
    now = @now()
    @lastInsert = now

  checkTimeout: ->
    now = @now()
    if now - @lastInsert > @get('timeout')
      @giveUpOnMissingParts()
    @set 'runLaterId', Ember.run.later(this, @checkTimeout, @get('checkTimeoutFrequency'))

  willDestroy: ->
    Ember.run.cancel @get('runLaterId')
    @_super.apply this, arguments

  now: ->
    (new Date()).getTime()

  giveUpOnMissingParts: ->
    if number = @get('queue.firstObject.number')
      console.log 'Giving up on missing parts in the buffer, switching to:', number
      @set('next', number)
      @check()
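A minimal usage sketch of the buffer added above, matching the behavior exercised by the spec file that follows: parts pushed out of order are held in the sorted queue and only appear in the proxied array once the next expected number arrives.

    buffer = Travis.ChunkBuffer.create(content: [])

    buffer.pushObject Em.Object.create(number: 2, content: "world\n")
    buffer.toArray()   # => []  (part 1 has not arrived yet)

    buffer.pushObject Em.Object.create(number: 1, content: "hello ")
    buffer.toArray()   # => ["hello ", "world\n"]  (released in order)

    # If a part never shows up, checkTimeout eventually calls
    # giveUpOnMissingParts and the buffer skips ahead to the lowest queued number.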
@@ -26,11 +26,6 @@ minispade.require 'app'
   runs ->
     foo = 'bar'

-_Date = Date
-@Date = (date) ->
-  new _Date(date || '2012-07-02T00:03:00Z')
-@Date.UTC = _Date.UTC
-
 # hacks for missing features in webkit
 unless Function::bind
   Function::bind = (oThis) ->
assets/scripts/spec/unit/chunk_buffer_spec.coffee (new file, 93 lines)
@@ -0,0 +1,93 @@
createChunk = (number, content) ->
  Em.Object.create(number: number, content: content)

describe 'Travis.ChunkBuffer', ->
  it 'waits for parts to be in order before revealing them', ->
    buffer = Travis.ChunkBuffer.create(content: [])

    buffer.pushObject createChunk(3, "baz")
    buffer.pushObject createChunk(2, "bar")

    expect(buffer.get('length')).toEqual(0)

    buffer.pushObject createChunk(1, "foo")

    expect(buffer.get('length')).toEqual(3)

    expect(buffer.toArray()).toEqual(['foo', 'bar', 'baz'])

  it 'ignores a part if it fails to be delivered within timeout', ->
    buffer = Travis.ChunkBuffer.create(content: [], timeout: 20, checkTimeoutFrequency: 5)

    buffer.pushObject createChunk(3, "baz")

    expect(buffer.get('length')).toEqual(0)

    buffer.pushObject createChunk(1, "foo")

    expect(buffer.get('length')).toEqual(1)

    waits 40
    runs ->
      expect(buffer.get('length')).toEqual(2)
      expect(buffer.toArray()).toEqual(['foo', 'baz'])

      buffer.destroy()

  it 'works correctly when parts are passed as content', ->
    content = [createChunk(2, 'bar')]

    buffer = Travis.ChunkBuffer.create(content: content)

    expect(buffer.get('length')).toEqual(0)

    buffer.pushObject createChunk(1, "foo")

    expect(buffer.get('length')).toEqual(2)
    expect(buffer.toArray()).toEqual(['foo', 'bar'])

  it 'works correctly when parts duplicated', ->
    buffer = Travis.ChunkBuffer.create(content: [])

    buffer.pushObject createChunk(1, "foo")
    buffer.pushObject createChunk(2, "bar")
    buffer.pushObject createChunk(3, "baz")

    buffer.pushObject createChunk(2, "bar")
    buffer.pushObject createChunk(3, "baz")
    buffer.pushObject createChunk(4, "qux")

    expect(buffer.get('length')).toEqual(4)
    expect(buffer.toArray()).toEqual(['foo', 'bar', 'baz', 'qux'])

  it 'fires array observers properly', ->
    changes = []
    buffer = Travis.ChunkBuffer.create(content: [])

    observer = Em.Object.extend(
      init: ->
        @_super.apply this, arguments

        @get('content').addArrayObserver this,
          willChange: 'arrayWillChange',
          didChange: 'arrayDidChange'

      arrayWillChange: (->)
      arrayDidChange: (array, index, removedCount, addedCount) ->
        changes.pushObject([index, addedCount])
    ).create(content: buffer)

    buffer.pushObject createChunk(2, "baz")

    expect(buffer.get('length')).toEqual(0)
    expect(changes.length).toEqual(0)

    buffer.pushObject createChunk(1, "foo")

    expect(buffer.get('length')).toEqual(2)
    expect(changes.length).toEqual(1)
    expect(changes[0]).toEqual([0, 2])

  it 'sets next to start if start is given at init', ->
    buffer = Travis.ChunkBuffer.create(content: [], start: 5)
    expect(buffer.get('next')).toEqual(5)