// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_
#define GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_
#include <queue>
#include "base/atomicops.h"
#include "base/atomic_ref_count.h"
#include "base/callback.h"
#include "base/memory/linked_ptr.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "base/shared_memory.h"
#include "gpu/command_buffer/common/command_buffer.h"
#include "gpu/command_buffer/service/cmd_buffer_engine.h"
#include "gpu/command_buffer/service/cmd_parser.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/gpu_export.h"
namespace gfx {
class GLFence;
}
namespace gpu {
// A ref-counted, thread-safe flag used to ask the scheduler to yield.
// Set() raises the flag from any thread; IsSet() observes it; Reset()
// lowers it again with a plain (no-barrier) store.
class PreemptionFlag
    : public base::RefCountedThreadSafe<PreemptionFlag> {
 public:
  PreemptionFlag() : flag_(0) {}

  // True if the flag has been raised and not yet reset.
  bool IsSet() { return !base::AtomicRefCountIsZero(&flag_); }

  // Raises the flag (atomic increment).
  void Set() { base::AtomicRefCountInc(&flag_); }

  // Lowers the flag.
  void Reset() { base::subtle::NoBarrier_Store(&flag_, 0); }

 private:
  friend class base::RefCountedThreadSafe<PreemptionFlag>;

  // Private: lifetime is managed exclusively by the ref count.
  ~PreemptionFlag() {}

  base::AtomicRefCount flag_;
};
// This class schedules commands that have been flushed. They are received via
// a command buffer and forwarded to a command parser. TODO(apatrick): This
// class should not know about the decoder. Do not add additional dependencies
// on it.
class GPU_EXPORT GpuScheduler
    : NON_EXPORTED_BASE(public CommandBufferEngine),
      public base::SupportsWeakPtr<GpuScheduler> {
 public:
  // None of the constructor arguments are owned; all must outlive this
  // scheduler (see the member comments below).
  GpuScheduler(CommandBuffer* command_buffer,
               AsyncAPIInterface* handler,
               gles2::GLES2Decoder* decoder);

  virtual ~GpuScheduler();

  // Called when the command buffer's put offset changes; presumably processes
  // the newly flushed commands via the parser — TODO(review): confirm against
  // the .cc. Exits early if |preemption_flag_| is set (see member comment).
  void PutChanged();

  // Installs the flag consulted by PutChanged to decide whether to yield
  // early in favor of higher-priority work.
  void SetPreemptByFlag(scoped_refptr<PreemptionFlag> flag) {
    preemption_flag_ = flag;
  }

  // Sets whether commands should be processed by this scheduler. Setting to
  // false unschedules. Setting to true reschedules. Whether or not the
  // scheduler is currently scheduled is "reference counted". Every call with
  // false must eventually be paired by a call with true.
  void SetScheduled(bool is_scheduled);

  // Returns whether the scheduler is currently able to process more commands.
  bool IsScheduled();

  // Returns whether the scheduler needs to be polled again in the future.
  bool HasMoreWork();

  typedef base::Callback<void(bool /* scheduled */)> SchedulingChangedCallback;

  // Sets a callback that is invoked just before scheduler is rescheduled
  // or descheduled. Takes ownership of callback object.
  void SetSchedulingChangedCallback(const SchedulingChangedCallback& callback);

  // Implementation of CommandBufferEngine.
  virtual Buffer GetSharedMemoryBuffer(int32 shm_id) OVERRIDE;
  virtual void set_token(int32 token) OVERRIDE;
  virtual bool SetGetBuffer(int32 transfer_buffer_id) OVERRIDE;
  virtual bool SetGetOffset(int32 offset) OVERRIDE;
  virtual int32 GetGetOffset() OVERRIDE;

  // Callback invoked after a command is processed — TODO(review): confirm
  // exact invocation point in the .cc.
  void SetCommandProcessedCallback(const base::Closure& callback);

  // Defers |task| until a GL fence has been crossed (see UnscheduleFence
  // below); presumably the fence is created at the current point in the GL
  // command stream — confirm against the .cc.
  void DeferToFence(base::Closure task);

  // Polls the fences, invoking callbacks that were waiting to be triggered
  // by them and returns whether all fences were complete.
  bool PollUnscheduleFences();

  // The parser this scheduler owns and feeds (may be NULL before
  // initialization; see |parser_|).
  CommandParser* parser() const {
    return parser_.get();
  }

  // Whether this scheduler is currently preempted via |preemption_flag_|;
  // presumably derived from |was_preempted_| — confirm against the .cc.
  bool IsPreempted();

 private:
  // Artificially reschedule if the scheduler is still unscheduled after a
  // timeout.
  void RescheduleTimeOut();

  // The GpuScheduler holds a weak reference to the CommandBuffer. The
  // CommandBuffer owns the GpuScheduler and holds a strong reference to it
  // through the ProcessCommands callback.
  CommandBuffer* command_buffer_;

  // The parser uses this to execute commands.
  AsyncAPIInterface* handler_;

  // Does not own decoder. TODO(apatrick): The GpuScheduler shouldn't need a
  // pointer to the decoder, it is only used to initialize the CommandParser,
  // which could be an argument to the constructor, and to determine the
  // reason for context lost.
  gles2::GLES2Decoder* decoder_;

  // TODO(apatrick): The GpuScheduler currently creates and owns the parser.
  // This should be an argument to the constructor.
  scoped_ptr<CommandParser> parser_;

  // Greater than zero if this is waiting to be rescheduled before continuing.
  int unscheduled_count_;

  // The number of times this scheduler has been artificially rescheduled on
  // account of a timeout.
  int rescheduled_count_;

  // A factory for outstanding rescheduling tasks that is invalidated whenever
  // the scheduler is rescheduled.
  base::WeakPtrFactory<GpuScheduler> reschedule_task_factory_;

  // The GpuScheduler will unschedule itself in the event that further GL calls
  // are issued to it before all these fences have been crossed by the GPU.
  struct UnscheduleFence {
    UnscheduleFence(gfx::GLFence* fence, base::Closure task);
    ~UnscheduleFence();

    // Owned fence; |task| runs once the fence is crossed (see
    // PollUnscheduleFences).
    scoped_ptr<gfx::GLFence> fence;
    base::Closure task;
  };
  // Pending fence/task pairs, drained by PollUnscheduleFences.
  std::queue<linked_ptr<UnscheduleFence> > unschedule_fences_;

  // Invoked just before the scheduler is rescheduled or descheduled (set via
  // SetSchedulingChangedCallback).
  SchedulingChangedCallback scheduling_changed_callback_;

  // NOTE(review): declared but no public setter is visible in this header;
  // presumably set elsewhere — confirm against the .cc.
  base::Closure descheduled_callback_;

  // Invoked after a command is processed (set via
  // SetCommandProcessedCallback).
  base::Closure command_processed_callback_;

  // If non-NULL and |preemption_flag_->IsSet()|, exit PutChanged early.
  scoped_refptr<PreemptionFlag> preemption_flag_;

  // Tracks whether the last PutChanged was preempted — TODO(review): confirm
  // exact semantics in the .cc.
  bool was_preempted_;

  DISALLOW_COPY_AND_ASSIGN(GpuScheduler);
};
} // namespace gpu
#endif // GPU_COMMAND_BUFFER_SERVICE_GPU_SCHEDULER_H_