Finish ChunkBuffer implementation and integrate it with Artifact
parent b4022f5f67
commit b289d3966b
@@ -9,76 +9,50 @@ require 'travis/chunk_buffer'
   init: ->
     @_super.apply this, arguments

-    @addObserver 'job.id', @fetchBody
-    @fetchBody()
+    @addObserver 'job.id', @fetch
+    @fetch()

-    @set 'queue', Ember.A([])
-    @set 'parts', Ember.ArrayProxy.create(content: [])
+    @set 'parts', Travis.ChunkBuffer.create(content: [])

-    @addObserver 'body', @fetchWorker
-    @fetchWorker()
+    #@addObserver 'body', @fetchWorker
+    #@fetchWorker()

   id: (->
     @get('job.id')
   ).property('job.id')

+  willDestroy: ->
+    @get('parts').destroy()

   clear: ->
     @set('body', '')
     @incrementProperty('version')
+    @get('parts').destroy()
+    @set 'parts', Travis.ChunkBuffer.create(content: [])

-  fetchBody: ->
+  fetch: ->
     if jobId = @get('job.id')
-      @removeObserver 'job.id', @fetchBody
+      @removeObserver 'job.id', @fetch

-      self = this
-      Travis.ajax.ajax "/jobs/#{jobId}/log.txt?cors_hax=true", 'GET',
-        dataType: 'text'
-        contentType: 'text/plain'
-        success: (data, textStatus, xhr) ->
-          if xhr.status == 204
-            logUrl = xhr.getResponseHeader('X-Log-Location')
-
-            # For some reason not all browsers can fetch this header
-            unless logUrl
-              logUrl = self.s3Url("/jobs/#{jobId}/log.txt")
-
-            $.ajax
-              url: logUrl
-              type: 'GET'
-              success: (data) ->
-                self.fetchedBody(data)
-          else
-            self.fetchedBody(data)
-
-  s3Url: (path) ->
-    endpoint = Travis.config.api_endpoint
-    staging = if endpoint.match(/-staging/) then '-staging' else ''
-    host = Travis.config.api_endpoint.replace(/^https?:\/\//, '').split('.').slice(-2).join('.')
-    "https://s3.amazonaws.com/archive#{staging}.#{host}#{path}"
-
-  fetchedBody: (body) ->
-    @set 'body', body
-    @set 'isLoaded', true
-
-  append: (body) ->
-    if @get('isInitialized')
-      @get('parts').pushObject body
-      @set('body', @get('body') + body)
-    else
-      @get('queue').pushObject(body)
-
-  recordDidLoad: (->
-    if @get('isLoaded')
-      if (body = @get 'body') && @get('parts.length') == 0
-        @get('parts').pushObject body
-
-      @set 'isInitialized', true
-
-      queue = @get('queue')
-      if queue.get('length') > 0
-        @append queue.toArray().join('')
-  ).observes('isLoaded')
+      handlers =
+        json: (json) => @loadParts(json['log']['parts'])
+        text: (text) => @loadText(text)
+
+      Travis.Artifact.Request.create(id: id, handlers: handlers).run() if id = @get('job.id')
+
+  append: (part) ->
+    @get('parts').pushObject(part)
+
+  loadParts: (parts) ->
+    console.log 'artifact model: load parts'
+    @append(part) for part in parts
+    @set('isLoaded', true)
+
+  loadText: (text) ->
+    console.log 'artifact model: load text'
+    number = -1
+    @append(number: 1, content: text)
+    @set('isLoaded', true)

   fetchWorker: ->
     if !@get('workerName') && (body = @get('body'))
@@ -88,3 +62,38 @@ require 'travis/chunk_buffer'
       worker = worker.trim().split(':')[0]
       @set('workerName', worker)
       @removeObserver 'body', @fetchWorker
+
+Travis.Artifact.Request = Em.Object.extend
+  HEADERS:
+    accept: 'application/vnd.travis-ci.2+json; chunked=true; version=2, text/plain; version=2'
+
+  run: ->
+    Travis.ajax.ajax "/jobs/#{@id}/log?cors_hax=true", 'GET',
+      dataType: 'text'
+      headers: @HEADERS
+      success: (body, status, xhr) => @handle(body, status, xhr)
+
+  handle: (body, status, xhr) ->
+    if xhr.status == 204
+      $.ajax(url: @redirectTo(xhr), type: 'GET', success: @handlers.text)
+    else if @isJson(xhr, body)
+      @handlers.json(JSON.parse(body))
+    else
+      @handlers.text(body)
+
+  redirectTo: (xhr) ->
+    # Firefox can't see the Location header on the xhr response due to the wrong
+    # status code 204. Should be some redirect code but that doesn't work with CORS.
+    xhr.getResponseHeader('Location') || @s3Url()
+
+  s3Url: ->
+    endpoint = Travis.config.api_endpoint
+    staging = if endpoint.match(/-staging/) then '-staging' else ''
+    host = endpoint.replace(/^https?:\/\//, '').split('.').slice(-2).join('.')
+    "https://s3.amazonaws.com/archive#{staging}.#{host}#{path}/jobs/#{@id}/log.txt"
+
+  isJson: (xhr, body) ->
+    # Firefox can't see the Content-Type header on the xhr response due to the wrong
+    # status code 204. Should be some redirect code but that doesn't work with CORS.
+    type = xhr.getResponseHeader('Content-Type') || ''
+    type.indexOf('json') > -1 || body.slice(0, 8) == '{"log":{'
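Aside: the new Request object separates transport from the Artifact model. The model hands it a handlers hash with json and text callbacks, and handle dispatches on the response. A minimal, framework-free sketch of that dispatch under the same assumptions; the fake xhr and sample payload are illustrative and not part of the commit:

# Sketch of the content-type dispatch used by Travis.Artifact.Request#handle.
# Only `handlers` and the header/body checks mirror the diff above; the fake
# xhr object and the sample payload below are made up for illustration.
dispatch = (body, xhr, handlers) ->
  type = xhr.getResponseHeader('Content-Type') || ''
  looksJson = type.indexOf('json') > -1 || body.slice(0, 8) == '{"log":{'
  if looksJson then handlers.json(JSON.parse(body)) else handlers.text(body)

fakeXhr = getResponseHeader: -> 'application/vnd.travis-ci.2+json'
handlers =
  json: (json) -> console.log 'parts:', json.log.parts
  text: (text) -> console.log 'plain log:', text

dispatch '{"log":{"parts":[{"number":1,"content":"foo"}]}}', fakeXhr, handlers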
@@ -97,7 +97,8 @@ Travis.Store = DS.Store.extend
     if event == 'job:log'
       if job = @find(Travis.Job, data['job']['id'])
-        job.appendLog(data['job']['_log'])
+        console.log 'job:log', data
+        job.appendLog(number: data['job']['number'], content: data['job']['_log'])
     else if data[type.singularName()]
       @_loadOne(this, type, data)
     else if data[type.pluralName()]
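For context, the keys read in that branch imply a job:log event payload shaped roughly as follows; the concrete values are illustrative, not taken from the commit:

# Assumed shape of a `job:log` event, inferred from the keys the store reads
# (data['job']['id'], data['job']['number'], data['job']['_log']); values are made up.
data =
  job:
    id: 42
    number: 3
    _log: "$ bundle install\n"

# which the store forwards as, per the diff:
#   job.appendLog(number: data['job']['number'], content: data['job']['_log'])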
@@ -104,7 +104,7 @@
     job = @get('job')
     job.subscribe() if job && !job.get('isFinished')
     null
-  ).property('job', 'job.state')
+  ).property('job', 'job.isFinished')

   logUrl: (->
     repo = @get('job.repo')
@@ -133,7 +133,7 @@
     @_super.apply this, arguments

     Ember.run.next this, ->
-      if @get 'log.isInitialized'
+      if @get 'log'
         @logDidChange()

   willDestroy: ->
@@ -148,9 +148,9 @@
   ).observes('log.version')

   logDidChange: (->
-    if @get('log.isInitialized') && @state == 'inDOM'
+    if @get('log') && @state == 'inDOM'
       @attachLogObservers()
-  ).observes('log', 'log.isInitialized')
+  ).observes('log')

   attachLogObservers: ->
     return if @get('logPartsObserversAttached') == Ember.guidFor(@get('log'))
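The two view hunks above stop waiting on log.isInitialized and instead re-attach observers whenever the log record itself changes, relying on the Ember.guidFor guard to keep attachment idempotent per log instance. A tiny sketch of that guard pattern, assuming only that Ember is loaded; the helper name and state object are illustrative, not the actual view code:

# Attach at most once per distinct log object, keyed by its Ember guid.
# Illustrative helper, not the actual view implementation.
attachOnce = (state, log, attach) ->
  guid = Ember.guidFor(log)
  return if state.attachedTo == guid
  state.attachedTo = guid
  attach(log)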
@@ -1,13 +1,20 @@
-Travis.ChunkBuffer = Em.ArrayProxy.extend Ember.MutableEnumerable,
-  timeout: 15000
-  start: 0
-  next: 0
+get = Ember.get
+
+Travis.ChunkBuffer = Em.ArrayProxy.extend
+  timeout: 5000
+  checkTimeoutFrequency: 1000
+  start: 1
+  next: 1

   init: ->
     @_super.apply this, arguments

+    @lastInsert = 0
+
     @set('next', @get('start'))

+    @checkTimeout()
+
     if @get('content.length')
       @get('queue.content').pushObjects @get('content').toArray()
@@ -33,13 +40,13 @@ Travis.ChunkBuffer = Em.ArrayProxy.extend Ember.MutableEnumerable,
   ).property()

   contentArrayDidChange: (array, index, removedCount, addedCount) ->
-    console.log 'content array did change'
     @_super.apply this, arguments

     if addedCount
       queue = @get('queue.content')
       queue.pushObjects array.slice(index, index + addedCount)
       @check()
+      @inserted()

   check: ->
     queue = @get('queue')
@@ -48,10 +55,35 @@ Travis.ChunkBuffer = Em.ArrayProxy.extend Ember.MutableEnumerable,
     arrangedContent = @get('arrangedContent')
     toPush = []

-    while queue.get('firstObject.number') == next
-      toPush.pushObject queue.shiftObject().get('content')
-      next += 1
+    while queue.get('firstObject.number') <= next
+      element = queue.shiftObject()
+      if get(element, 'number') == next
+        toPush.pushObject get(element, 'content')
+        next += 1

-    arrangedContent.pushObjects toPush if toPush.length
+    if toPush.length
+      arrangedContent.pushObjects toPush

     @set('next', next)
+
+  inserted: ->
+    now = @now()
+    @lastInsert = now
+
+  checkTimeout: ->
+    now = @now()
+    if now - @lastInsert > @get('timeout')
+      @giveUpOnMissingParts()
+    @set 'runLaterId', Ember.run.later(this, @checkTimeout, @get('checkTimeoutFrequency'))
+
+  willDestroy: ->
+    Ember.run.cancel @get('runLaterId')
+    @_super.apply this, arguments
+
+  now: ->
+    (new Date()).getTime()
+
+  giveUpOnMissingParts: ->
+    if number = @get('queue.firstObject.number')
+      @set('next', number)
+      @check()
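Taken together, the ChunkBuffer changes queue out-of-order parts and only reveal a contiguous run starting at next, dropping duplicates and, once the timeout elapses, giving up on missing numbers. A framework-free sketch of just the ordering step in check, with no Ember and no timers; the names and sample data are illustrative:

# Reveal every queued chunk whose number continues the run starting at `next`.
# The queue is assumed sorted by number; duplicates (number < next) are dropped,
# mirroring the `<=` comparison plus equality check in the diff above.
flushReady = (queue, next) ->
  revealed = []
  while queue.length && queue[0].number <= next
    chunk = queue.shift()
    if chunk.number == next
      revealed.push chunk.content
      next += 1
  { revealed, next }

queue = [{ number: 2, content: 'bar' }, { number: 3, content: 'baz' }]
console.log flushReady(queue, 1)   # part 1 missing, nothing revealed yet

queue.unshift { number: 1, content: 'foo' }
console.log flushReady(queue, 1)   # revealed: ['foo', 'bar', 'baz'], next: 4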
@@ -26,11 +26,6 @@ minispade.require 'app'
   runs ->
     foo = 'bar'

-    _Date = Date
-    @Date = (date) ->
-      new _Date(date || '2012-07-02T00:03:00Z')
-    @Date.UTC = _Date.UTC
-
 # hacks for missing features in webkit
 unless Function::bind
   Function::bind = (oThis) ->
@@ -5,48 +5,61 @@ describe 'Travis.ChunkBuffer', ->
   it 'waits for parts to be in order before revealing them', ->
     buffer = Travis.ChunkBuffer.create(content: [])

-    buffer.pushObject createChunk(2, "baz")
-    buffer.pushObject createChunk(1, "bar")
+    buffer.pushObject createChunk(3, "baz")
+    buffer.pushObject createChunk(2, "bar")

     expect(buffer.get('length')).toEqual(0)

-    buffer.pushObject createChunk(0, "foo")
+    buffer.pushObject createChunk(1, "foo")

     expect(buffer.get('length')).toEqual(3)

     expect(buffer.toArray()).toEqual(['foo', 'bar', 'baz'])

   it 'ignores a part if it fails to be delivered within timeout', ->
-    expect 4
-
-    buffer = Travis.ChunkBuffer.create(content: [], timeout: 10)
-
-    buffer.pushObject createChunk(2, "baz")
+    buffer = Travis.ChunkBuffer.create(content: [], timeout: 20, checkTimeoutFrequency: 5)
+
+    buffer.pushObject createChunk(3, "baz")

     expect(buffer.get('length')).toEqual(0)

-    buffer.pushObject createChunk(0, "foo")
+    buffer.pushObject createChunk(1, "foo")

     expect(buffer.get('length')).toEqual(1)

-    stop()
-    setTimeout( (->
+    waits 40
+    runs ->
       expect(buffer.get('length')).toEqual(2)
-      expect(buffer.toArray()).toEqual(['foo', 'bar', 'baz'])
-    ), 20)
+      expect(buffer.toArray()).toEqual(['foo', 'baz'])
+
+      buffer.destroy()

   it 'works correctly when parts are passed as content', ->
-    content = [createChunk(1, 'bar')]
+    content = [createChunk(2, 'bar')]

     buffer = Travis.ChunkBuffer.create(content: content)

     expect(buffer.get('length')).toEqual(0)

-    buffer.pushObject createChunk(0, "foo")
+    buffer.pushObject createChunk(1, "foo")

     expect(buffer.get('length')).toEqual(2)
     expect(buffer.toArray()).toEqual(['foo', 'bar'])

+  it 'works correctly when parts duplicated', ->
+    buffer = Travis.ChunkBuffer.create(content: [])
+
+    buffer.pushObject createChunk(1, "foo")
+    buffer.pushObject createChunk(2, "bar")
+    buffer.pushObject createChunk(3, "baz")
+
+    buffer.pushObject createChunk(2, "bar")
+    buffer.pushObject createChunk(3, "baz")
+    buffer.pushObject createChunk(4, "qux")
+
+    expect(buffer.get('length')).toEqual(4)
+    expect(buffer.toArray()).toEqual(['foo', 'bar', 'baz', 'qux'])
+
   it 'fires array observers properly', ->
     changes = []
     buffer = Travis.ChunkBuffer.create(content: [])
@@ -64,12 +77,12 @@ describe 'Travis.ChunkBuffer', ->
       changes.pushObject([index, addedCount])
     ).create(content: buffer)

-    buffer.pushObject createChunk(1, "baz")
+    buffer.pushObject createChunk(2, "baz")

     expect(buffer.get('length')).toEqual(0)
     expect(changes.length).toEqual(0)

-    buffer.pushObject createChunk(0, "foo")
+    buffer.pushObject createChunk(1, "foo")

     expect(buffer.get('length')).toEqual(2)
     expect(changes.length).toEqual(1)
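The new and updated specs assume a createChunk(number, content) helper defined elsewhere in the spec suite; judging by how the buffer reads parts via get(element, 'number') and get(element, 'content'), it presumably builds a plain part object along these lines. This is an assumption, not the actual helper:

# Assumed shape of the spec helper; the real one may wrap the part in an Ember.Object.
createChunk = (number, content) ->
  { number: number, content: content }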