mirror of
https://github.com/ARM-software/workload-automation.git
synced 2025-09-04 20:32:36 +01:00
Compare commits
944 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
1364ec05e8 | ||
|
d5c888cc90 | ||
|
d6ab68bffc | ||
|
30e9b553ff | ||
|
6a3f441064 | ||
|
13cbe2f059 | ||
|
53b173c55f | ||
|
f598c60514 | ||
|
ceda8e74bf | ||
|
173c71b867 | ||
|
d88d35be26 | ||
|
599452d41f | ||
|
33dae51536 | ||
|
f8950dea33 | ||
|
136d1fef0f | ||
|
5204383582 | ||
|
bfa1d8dd62 | ||
|
b75fdf85d0 | ||
|
fcbb83f5ac | ||
|
807003128e | ||
|
3e4d068eff | ||
|
a3936afb4c | ||
|
24000a21df | ||
|
e5c0ca85f0 | ||
|
b4026ae390 | ||
|
f76c00dd99 | ||
|
b1e375a676 | ||
|
1102ba1679 | ||
|
6d999301f3 | ||
|
e4fdf0bdb9 | ||
|
d493b1e790 | ||
|
47e31765b4 | ||
|
79faec120e | ||
|
66dbe7a508 | ||
|
80a780dcfe | ||
|
694d51ffb6 | ||
|
1befe63e45 | ||
|
27b08bade0 | ||
|
1477a89ee4 | ||
|
0dfbbae7b6 | ||
|
a8a8d21de6 | ||
|
f467f6f991 | ||
|
4352e02806 | ||
|
693d0544a3 | ||
|
eb239c65d0 | ||
|
f7e4232eaa | ||
|
8cbf189029 | ||
|
6e45e1a039 | ||
|
b6f770cfc5 | ||
|
c7de8cabd6 | ||
|
fd7df36a5a | ||
|
607187ad5b | ||
|
b4036c5f15 | ||
|
b6e077c06b | ||
|
8e0b793f89 | ||
|
8b82451230 | ||
|
64c352fab6 | ||
|
68697a42a7 | ||
|
ae4ae3da5e | ||
|
393abc267f | ||
|
751970f991 | ||
|
254e9fff38 | ||
|
088709f290 | ||
|
850fcb24ab | ||
|
ace41d10a5 | ||
|
cb53fe9ec8 | ||
|
4e161127e1 | ||
|
bf43bf93bc | ||
|
7a19046645 | ||
|
5db11462be | ||
|
7bf0e3c344 | ||
|
d16d8bf62d | ||
|
c93cc81aac | ||
|
9491763aa7 | ||
|
a172c8f624 | ||
|
33286ba982 | ||
|
f69e4c5b18 | ||
|
5b543d2edf | ||
|
17edb13eb9 | ||
|
40d281b336 | ||
|
74ea78aa42 | ||
|
8edce40301 | ||
|
642d757066 | ||
|
969201968e | ||
|
3f76920fa9 | ||
|
46b78d35be | ||
|
ba34b973ac | ||
|
6d173f2f3f | ||
|
8b1d2c9fe9 | ||
|
d3c59e2f74 | ||
|
fce04d2938 | ||
|
27df426c0d | ||
|
f8966bf324 | ||
|
e076d47a7b | ||
|
ea798aefb3 | ||
|
dcf13f8c2c | ||
|
f99c6f5656 | ||
|
359d9d3e5f | ||
|
d56f581a0a | ||
|
f12cf6d557 | ||
|
311c4e419f | ||
|
2becd94381 | ||
|
1daa7f97c0 | ||
|
7af5868c22 | ||
|
2e1ce49170 | ||
|
23eb357e9e | ||
|
71c5d23d97 | ||
|
edfef444fb | ||
|
3a7a5276e4 | ||
|
f179b09978 | ||
|
620fbfdd2a | ||
|
4213e8e7d1 | ||
|
9fffa7958a | ||
|
0f2bc17eca | ||
|
558e40698b | ||
|
41b52178bb | ||
|
8aa1bdc63d | ||
|
8355fcf886 | ||
|
fa7d89d734 | ||
|
4649fa13db | ||
|
10dd2b304e | ||
|
93fbb7282a | ||
|
edc26fe75c | ||
|
6aecf1b35e | ||
|
4c5e008609 | ||
|
5bed658045 | ||
|
8ac5657993 | ||
|
fc26daecfc | ||
|
0232341445 | ||
|
3b052cc619 | ||
|
3a9505d54e | ||
|
4e94ff9ed7 | ||
|
c47ae5cfcf | ||
|
ce11b94f28 | ||
|
765fdd7cbb | ||
|
77d724efa3 | ||
|
acb9dd61e7 | ||
|
cc7684986a | ||
|
e69aea4e69 | ||
|
2b6f036d9a | ||
|
5738d19114 | ||
|
53ae47bff3 | ||
|
1c8e18bf36 | ||
|
5dbf7e7d38 | ||
|
a2945d58cb | ||
|
8727fe514a | ||
|
6465e732fd | ||
|
f9ec869c7b | ||
|
1bbd3ef87a | ||
|
486ade6499 | ||
|
1a23bd03a2 | ||
|
5018a1ec94 | ||
|
fe58245843 | ||
|
006bf6387f | ||
|
19569816d3 | ||
|
1dfbaf4ebe | ||
|
5b7d61b4b9 | ||
|
b5dc5b8648 | ||
|
2202326c02 | ||
|
8608c3b747 | ||
|
9afe084f2c | ||
|
83ab1ac441 | ||
|
ea1d13c37f | ||
|
20996e9a58 | ||
|
3711f7316d | ||
|
d279cc7453 | ||
|
f6b8fd3f4b | ||
|
ff2f88fbd7 | ||
|
96f4ade874 | ||
|
ac0256e377 | ||
|
793af6253f | ||
|
5ef7d2dd44 | ||
|
cf8cb5bfab | ||
|
9376c6875b | ||
|
a6347f5833 | ||
|
38a7e01e83 | ||
|
e18366b3f8 | ||
|
b9701201a3 | ||
|
441ba974b7 | ||
|
a33df50ce8 | ||
|
52d4635fe8 | ||
|
14924ec6f4 | ||
|
3d610788a3 | ||
|
1986511ae8 | ||
|
175e7f3cc0 | ||
|
392a3f1600 | ||
|
b5c0bdb0eb | ||
|
502b0ed4b3 | ||
|
cab9d918ab | ||
|
e686e89b39 | ||
|
b510b31052 | ||
|
5b59d101ef | ||
|
7713f02252 | ||
|
0a2afdfd84 | ||
|
530714c61c | ||
|
67f418f79f | ||
|
64860a2d1a | ||
|
f57dd83d1a | ||
|
3782a33060 | ||
|
8e27794124 | ||
|
9d4aa4983a | ||
|
b426e00f2f | ||
|
07d34e5615 | ||
|
b1ae5a5465 | ||
|
4ea4bc8631 | ||
|
fe259dca05 | ||
|
86f3066f56 | ||
|
0f579e18b3 | ||
|
25172fb027 | ||
|
550a0db61a | ||
|
73c2609a72 | ||
|
1ec7961b0e | ||
|
01f2a5f412 | ||
|
480a054860 | ||
|
e9ba9352a6 | ||
|
0a3ff099c0 | ||
|
75cc5854bf | ||
|
77aaa0b849 | ||
|
0945dd6ba4 | ||
|
4c94ba43ac | ||
|
efae2e8c32 | ||
|
59874b862d | ||
|
da19859c25 | ||
|
d87e425c24 | ||
|
2872080d1a | ||
|
625a3a39a5 | ||
|
aa2d187c4d | ||
|
b80e5dc52e | ||
|
2208d45bfb | ||
|
51e4e71931 | ||
|
0388fa6f36 | ||
|
ee7c04a568 | ||
|
9707aa6237 | ||
|
019ee34c0d | ||
|
873bdf0bc7 | ||
|
54c409ce6f | ||
|
a2d0747b4c | ||
|
25eac432c9 | ||
|
dd61f99785 | ||
|
164f207084 | ||
|
cb01b0c9a9 | ||
|
139a0698c9 | ||
|
259b813a96 | ||
|
299b28b3c1 | ||
|
ece33c1d68 | ||
|
f68cf4e317 | ||
|
c49c5c4121 | ||
|
b8d7956d4c | ||
|
fee872585f | ||
|
2dd3a2ba4d | ||
|
662033399f | ||
|
bb33123b17 | ||
|
fab6a977aa | ||
|
25dd6b71f3 | ||
|
246416d4d2 | ||
|
1fe037486f | ||
|
f27b500028 | ||
|
5a780e8211 | ||
|
60ca0649ab | ||
|
dbda128813 | ||
|
ff7a0626ce | ||
|
9a94c59605 | ||
|
d3dd9c849a | ||
|
12a78ce291 | ||
|
c8a735e298 | ||
|
071bf9fba7 | ||
|
ef919a0fa9 | ||
|
88b18dda07 | ||
|
242df842bc | ||
|
a6355885fc | ||
|
77a44f11c6 | ||
|
afeb726d53 | ||
|
7904e6b562 | ||
|
224b973ace | ||
|
8660d0f488 | ||
|
be7aa3d379 | ||
|
8503fea0ee | ||
|
4a15a41cf8 | ||
|
3a90309383 | ||
|
b48e5ce58a | ||
|
f33d6f4729 | ||
|
6f8989a8ba | ||
|
a826b661f4 | ||
|
43f4e52995 | ||
|
23b3b165d5 | ||
|
2f87e126f0 | ||
|
59d74b6273 | ||
|
7b92f355c8 | ||
|
982069be32 | ||
|
63ff8987ea | ||
|
f276d4e39f | ||
|
1811a8b733 | ||
|
0ae03e2c54 | ||
|
c423a8b4bc | ||
|
c207a34872 | ||
|
2cb40d3da6 | ||
|
18d1f9f649 | ||
|
17ce8d0fe9 | ||
|
ac03c9bab4 | ||
|
8bdffe6f9c | ||
|
2ff13089fd | ||
|
772346507c | ||
|
0fc88a84be | ||
|
6e4f6af942 | ||
|
c87daa510e | ||
|
5e1c9694e7 | ||
|
a9a42164a3 | ||
|
0d50fe9b77 | ||
|
e5c228bab2 | ||
|
7ccac87b93 | ||
|
24a2afb5b9 | ||
|
9652801cce | ||
|
881b7514e2 | ||
|
17fe6c9a5b | ||
|
f02b6d5fd9 | ||
|
eaf4d02aea | ||
|
56a4d52995 | ||
|
ec5c149df5 | ||
|
c0f32237e3 | ||
|
5a1c8c7a7e | ||
|
46cd26e774 | ||
|
544c498eb6 | ||
|
5ad75dd0b8 | ||
|
b2248413b7 | ||
|
9296bafbd9 | ||
|
8abf39762d | ||
|
87cbce4244 | ||
|
ef61f16896 | ||
|
e96450d226 | ||
|
2cf08cf448 | ||
|
59cfd7c757 | ||
|
d3c7f11f2d | ||
|
187fd70077 | ||
|
fe7f98a98b | ||
|
66c18fcd31 | ||
|
5773da0d08 | ||
|
d581f1f329 | ||
|
f165969d61 | ||
|
8dc24bd327 | ||
|
59066cb46d | ||
|
6c4d88ff57 | ||
|
a40542d57b | ||
|
697aefc7bb | ||
|
8bc71bb810 | ||
|
91210f26e9 | ||
|
44a49db04d | ||
|
0bfa4bff3c | ||
|
73aa590056 | ||
|
985b249a24 | ||
|
f5e138bed0 | ||
|
b6c0e2e4fd | ||
|
df8ef6be6b | ||
|
8a3186e1c8 | ||
|
68043f2a52 | ||
|
95bbce77a2 | ||
|
ec85f9f8a0 | ||
|
82e4998092 | ||
|
48259d872b | ||
|
8d13e1f341 | ||
|
33ef949507 | ||
|
68714e0e55 | ||
|
9ee1666a76 | ||
|
8dcdc9afe1 | ||
|
724f6e590e | ||
|
507090515b | ||
|
1dfbe9e44c | ||
|
d303ab2b50 | ||
|
b17ae78d6b | ||
|
391b0b01fc | ||
|
20861f0ee4 | ||
|
ff5f48b7e7 | ||
|
9a301175b0 | ||
|
712c79020d | ||
|
12dfbef76b | ||
|
b1f607ef70 | ||
|
107e8414bb | ||
|
4f8b7e9f59 | ||
|
a077e7df3c | ||
|
a2257fe1e2 | ||
|
50353d0b8f | ||
|
0f5621ff66 | ||
|
2eca77fb02 | ||
|
3de5b5fe0b | ||
|
499a9f4082 | ||
|
3043506d86 | ||
|
7db904b359 | ||
|
5abeb7aac2 | ||
|
e04691afb9 | ||
|
15ced50640 | ||
|
1a2e1fdf75 | ||
|
3531dd6d07 | ||
|
cf55f317f8 | ||
|
79554a2dbc | ||
|
06c232545a | ||
|
11184750ec | ||
|
77b221fc5a | ||
|
20cd6a9c18 | ||
|
34d7e7055a | ||
|
0c1e01cad4 | ||
|
a68e46eb0a | ||
|
203a3f7d07 | ||
|
de133cddb4 | ||
|
a5c9b94257 | ||
|
c203ec8921 | ||
|
de021da300 | ||
|
693afa3528 | ||
|
5203188d9e | ||
|
08663209d6 | ||
|
232e4b3e65 | ||
|
13ebc8ad55 | ||
|
759f8db1bc | ||
|
9a7cccacab | ||
|
288aa764b3 | ||
|
a32cc0f213 | ||
|
fdbc2ae372 | ||
|
9129a9d2d8 | ||
|
cb46c57754 | ||
|
536c0ffe4e | ||
|
4f30e37f22 | ||
|
0deb8fd7c6 | ||
|
85edc3084b | ||
|
3a99a284c4 | ||
|
5e3cc8fcb5 | ||
|
f92bd1bcdd | ||
|
519efaf22c | ||
|
28ef01505d | ||
|
dec574e59e | ||
|
7ad8b8522b | ||
|
14a1bc8a5d | ||
|
45a9c0a86d | ||
|
7edb2c8919 | ||
|
5fad83a50d | ||
|
68fefe8532 | ||
|
c96590b713 | ||
|
dc22856431 | ||
|
2d1f0e99b9 | ||
|
da720c8613 | ||
|
eaabe01fa5 | ||
|
dc07c8d87e | ||
|
a402bfd7f9 | ||
|
fe2d279eac | ||
|
0ffbac1629 | ||
|
65cc22a305 | ||
|
2ae8c6073f | ||
|
dc5cf6d7b8 | ||
|
e6ae9ecc51 | ||
|
85fb5e3684 | ||
|
98b19328de | ||
|
73ddc205fc | ||
|
1e6eaff702 | ||
|
78d49ca8ae | ||
|
f4c89644ff | ||
|
798a7befb8 | ||
|
6a388ffc71 | ||
|
82df73278e | ||
|
68a39d7fa1 | ||
|
120f0ff94f | ||
|
f47ba6fea6 | ||
|
5f8da66322 | ||
|
67213d471b | ||
|
7c35c604f4 | ||
|
c11cc7d0d2 | ||
|
89f1e7b6e5 | ||
|
bd826783cc | ||
|
0fb867e7c6 | ||
|
6b3187c2c9 | ||
|
75ce620e6b | ||
|
d9c4063307 | ||
|
5f2b25532b | ||
|
0998c18efd | ||
|
9eeeaf02ad | ||
|
df937dc847 | ||
|
1ef7bb4e93 | ||
|
41890589e1 | ||
|
a0cd66ed45 | ||
|
b84f97a902 | ||
|
ffc3fcef67 | ||
|
09563bc01e | ||
|
f1bb44b3e7 | ||
|
1085c715c2 | ||
|
c105e8357c | ||
|
dc1b0e629e | ||
|
62a0fd70de | ||
|
438e18328d | ||
|
57b31149f1 | ||
|
09390e7ffb | ||
|
e83d021a5c | ||
|
bca012fccb | ||
|
bb37c31fed | ||
|
0005f927e8 | ||
|
9222257d79 | ||
|
585d8b2d7d | ||
|
d3470dca73 | ||
|
0f60e9600f | ||
|
6a85dff94f | ||
|
aae88b8be4 | ||
|
72a617c16d | ||
|
d6355966bf | ||
|
845d577482 | ||
|
9ccf256ee8 | ||
|
cc9b00673e | ||
|
e7c75b2d3b | ||
|
480155fd8c | ||
|
d98bdac0be | ||
|
32cf5c0939 | ||
|
a330a64340 | ||
|
bef8fb40ef | ||
|
344bc519c4 | ||
|
3da58d9541 | ||
|
065ebaac61 | ||
|
f85ef61ce9 | ||
|
e5c6ef5368 | ||
|
a697c47c49 | ||
|
c6e712d44c | ||
|
00c9bdc2a6 | ||
|
1b31d8ef6f | ||
|
b3a9512f44 | ||
|
a06016a442 | ||
|
c02a1118d7 | ||
|
d9f1190e1f | ||
|
36c58ee76f | ||
|
b04d141680 | ||
|
3453fe5fc1 | ||
|
016876e814 | ||
|
e8ba515075 | ||
|
c533da3f38 | ||
|
8261c1d5b5 | ||
|
1d12e6a8b0 | ||
|
2957d63e2f | ||
|
7b8d62d1ec | ||
|
2063f48cf0 | ||
|
2510329cdf | ||
|
9c8c81cc25 | ||
|
2e5b3671e9 | ||
|
705ce9ae40 | ||
|
f43daacd41 | ||
|
3b41f69762 | ||
|
a722e594af | ||
|
a447689a86 | ||
|
9fe4887626 | ||
|
6492b95edd | ||
|
c5b4b70aae | ||
|
55f6ef4a5e | ||
|
672c74c76c | ||
|
95f17702d7 | ||
|
ee4764adc4 | ||
|
3a8eed1062 | ||
|
29abd290f4 | ||
|
3bf114cf48 | ||
|
96a6179355 | ||
|
a6382b730b | ||
|
661371f6f0 | ||
|
b9b4a7c65c | ||
|
20a5660ea1 | ||
|
691c380779 | ||
|
7546232c10 | ||
|
4fa3d9de6e | ||
|
90bfbf6022 | ||
|
079d5b4ec5 | ||
|
2c5d51cb2a | ||
|
1c146e3ce7 | ||
|
6159711a05 | ||
|
208fdf4210 | ||
|
0fc602ecdb | ||
|
552ea2a1bb | ||
|
5f6247cf8b | ||
|
c96a50e081 | ||
|
812bbffab1 | ||
|
361f1a0f0c | ||
|
8e84e4a230 | ||
|
fe4d49e334 | ||
|
dc01dd79ee | ||
|
0003993173 | ||
|
b6442acf80 | ||
|
a6feb65b34 | ||
|
f1d3ebc466 | ||
|
0608356465 | ||
|
d7ef6ff8ba | ||
|
2904246cb5 | ||
|
5abb42eab9 | ||
|
ce91e34f9f | ||
|
18f4c3611c | ||
|
6bdd6cf037 | ||
|
e36c619fc2 | ||
|
76253e1e26 | ||
|
bf8dc6642f | ||
|
3d8c384bb7 | ||
|
3247b63cb9 | ||
|
100c6c0ac9 | ||
|
f063726cc3 | ||
|
84f7adbfb2 | ||
|
b6c497d32b | ||
|
f430187b11 | ||
|
37c49e22b3 | ||
|
9e12930a43 | ||
|
d5f4457701 | ||
|
c9f86b05dd | ||
|
6e447aa8b2 | ||
|
5eb7ca07fe | ||
|
6d6cddff58 | ||
|
94cc17271e | ||
|
9aadb9087a | ||
|
2e35d4003f | ||
|
0d076fe8ba | ||
|
bfed59a7cf | ||
|
047308a904 | ||
|
0179d45b5b | ||
|
c04b98c75a | ||
|
3501eccb8e | ||
|
a80780b9ed | ||
|
1b6b0907f9 | ||
|
4389a7d350 | ||
|
227c39d95d | ||
|
25b9350fea | ||
|
9b7b57a4d4 | ||
|
5aae705172 | ||
|
b653beacd3 | ||
|
88f57e5251 | ||
|
f44fd9df7a | ||
|
8513304aeb | ||
|
e38c87f258 | ||
|
03a9470007 | ||
|
2d8b8ba799 | ||
|
e4ee496bc9 | ||
|
205934d55b | ||
|
95c3f049fb | ||
|
25c0fd7b8b | ||
|
1cf60c2615 | ||
|
c3d8128ff3 | ||
|
aa2ae03ce6 | ||
|
6137d5650f | ||
|
0703db05cf | ||
|
0e1990a2bb | ||
|
25e53c2abc | ||
|
14f5858e3d | ||
|
a6d374bcff | ||
|
85eba9c37a | ||
|
0acbcc9f95 | ||
|
1d67dd3b99 | ||
|
ab45c4499f | ||
|
75e4b7d2ae | ||
|
5e92728d77 | ||
|
b5d879a90b | ||
|
507efaec48 | ||
|
0db14e2466 | ||
|
6ced04daf0 | ||
|
b6064fa348 | ||
|
6207f79f01 | ||
|
295b04fb5a | ||
|
19072530e4 | ||
|
39ed42ccb9 | ||
|
89da25c25e | ||
|
4debbb2a66 | ||
|
e38bd942a8 | ||
|
47b2081ccb | ||
|
298602768e | ||
|
d2dbc9d6dd | ||
|
d512029f10 | ||
|
dc7dea1c3e | ||
|
2d3be33bb0 | ||
|
65ab86221b | ||
|
b8e25efdd4 | ||
|
2e4bda71a8 | ||
|
30d7ee52f4 | ||
|
539c3de7b8 | ||
|
687e1ba6b8 | ||
|
a72ae92ece | ||
|
1562f17d8c | ||
|
255e6c1545 | ||
|
c733ecad98 | ||
|
088d0f6981 | ||
|
13e5e4d943 | ||
|
6e72ad0cc3 | ||
|
cb89bd5708 | ||
|
30bb453747 | ||
|
a37e734cf1 | ||
|
a27768fe21 | ||
|
6affc484f4 | ||
|
90ea2dd569 | ||
|
df6d1f1c2b | ||
|
34a604f4fc | ||
|
f7941bbc25 | ||
|
f6ecc25a4b | ||
|
5c53f394cb | ||
|
967f9570e2 | ||
|
19d6ce5486 | ||
|
3b4dc137d2 | ||
|
78314c1ef2 | ||
|
b3c9d43ccd | ||
|
7f5952aa9c | ||
|
b018adac11 | ||
|
4904c6cf71 | ||
|
26dee81164 | ||
|
c09972e7a8 | ||
|
6069ccacdc | ||
|
22d72de969 | ||
|
b712dddfc0 | ||
|
d6cebc46ce | ||
|
85c78e6566 | ||
|
fcb6504f1e | ||
|
b25f7ec4a3 | ||
|
5401a59da0 | ||
|
00561e0973 | ||
|
642da319d4 | ||
|
a6e9525264 | ||
|
4d5413ac26 | ||
|
ccea63555c | ||
|
51c5ef1520 | ||
|
c6ede56942 | ||
|
e0ecc9aaf4 | ||
|
44c2f18f76 | ||
|
53c669f906 | ||
|
c076a87098 | ||
|
90c0ed281d | ||
|
aac69a9c14 | ||
|
08bfef961e | ||
|
d9f45db71e | ||
|
73d85c2b4e | ||
|
55b38556fe | ||
|
a71756acda | ||
|
9470efe410 | ||
|
0b29bda206 | ||
|
042da24e7d | ||
|
a1e99e5591 | ||
|
b98b31a427 | ||
|
4eb0d9d750 | ||
|
4af93d94dd | ||
|
e7fae25821 | ||
|
53de517488 | ||
|
15e854b8f1 | ||
|
a85e45c6b0 | ||
|
4d3feeba64 | ||
|
92ddcbb1e3 | ||
|
7c7a5de988 | ||
|
cbf7eadc6c | ||
|
d775be25f7 | ||
|
8dc4321deb | ||
|
0d3e6b8386 | ||
|
7ee44fb0e4 | ||
|
ab76aa73f2 | ||
|
179baf030e | ||
|
2f214da8a2 | ||
|
e357a46b62 | ||
|
6c8228a26c | ||
|
b3a0933221 | ||
|
557b792c77 | ||
|
2ee9b40527 | ||
|
32f3dc21e4 | ||
|
b31a9bd61a | ||
|
67896dfd86 | ||
|
88ba8e3ba7 | ||
|
771567365d | ||
|
5fdb94d804 | ||
|
026e663155 | ||
|
4b7af1d2a6 | ||
|
d9cd1d3282 | ||
|
c239322c4d | ||
|
0c19d75bf4 | ||
|
084de2e58c | ||
|
88c304292f | ||
|
c40a7fd644 | ||
|
b976164ee9 | ||
|
d102994214 | ||
|
7422a72a7b | ||
|
3044d192f9 | ||
|
afa2b11975 | ||
|
1a604ac2e3 | ||
|
d60034f7d7 | ||
|
8980304e56 | ||
|
02af02f0cb | ||
|
9971041e45 | ||
|
e9b21e2ef3 | ||
|
5cfecf8068 | ||
|
bf189dbe6a | ||
|
ecb1a9f1f9 | ||
|
22b3fe1ac8 | ||
|
953783dc2b | ||
|
8f972322a5 | ||
|
1fa93c04d2 | ||
|
ebe6202e22 | ||
|
b4971d76d6 | ||
|
ead0be2763 | ||
|
29aa81a694 | ||
|
f59da723fb | ||
|
a9ab67990a | ||
|
adefbb7b2c | ||
|
c550657912 | ||
|
578dfb3d99 | ||
|
f49287cf09 | ||
|
777003ed51 | ||
|
a254a44f0e | ||
|
506ed57ca6 | ||
|
c31d4ec8a3 | ||
|
9e822c4b18 | ||
|
260616711f | ||
|
b9a8f6155c | ||
|
a450957b9a | ||
|
512bacc1be | ||
|
782d4501cd | ||
|
26dfe97ffd | ||
|
48748797b7 | ||
|
ee63cbde62 | ||
|
e7b58c72ac | ||
|
9b16e3f282 | ||
|
4e9601d905 | ||
|
48cced2ff9 | ||
|
57d3b80e85 | ||
|
cbd2c6727f | ||
|
5daa9014a8 | ||
|
b2981a57bc | ||
|
98b259be33 | ||
|
b30d702f22 | ||
|
40bd32280a | ||
|
fbde403f6f | ||
|
4b210e7aab | ||
|
2929106049 | ||
|
485ba419b3 | ||
|
0e751bdd73 | ||
|
556bc84023 | ||
|
715438e486 | ||
|
68ae8b9277 | ||
|
c5b884cf81 | ||
|
a4bff161aa | ||
|
950f0851bf | ||
|
49d7072440 | ||
|
9d5e0cdc00 | ||
|
841bd784d9 | ||
|
47ce9db383 | ||
|
c52d562411 | ||
|
6b041e6822 | ||
|
c82dd87830 | ||
|
b002505ac2 | ||
|
09aa9e6792 | ||
|
108928c6a5 | ||
|
3112eb0a9b | ||
|
070b2230c9 | ||
|
958a8a09da | ||
|
62593987f4 | ||
|
e422ccc509 | ||
|
6ccca6d4c0 | ||
|
57972a56af | ||
|
67ad4a63e4 | ||
|
7a86a1b17f | ||
|
f504fc8791 | ||
|
09d0736d3b | ||
|
2f1c7300d4 | ||
|
6824f045fd | ||
|
e57c5fccb3 | ||
|
a6ef53291b | ||
|
1993007d49 | ||
|
8b606dd5f9 | ||
|
799558d201 | ||
|
bb5d5cba8e | ||
|
82fed172d5 | ||
|
cd6babeab1 | ||
|
e87e4c582c | ||
|
97efd11b86 | ||
|
bb6421b339 | ||
|
50c8c4da34 | ||
|
dcea921907 | ||
|
0beb3fc3a2 | ||
|
1633d91331 | ||
|
d126d56e98 | ||
|
06e5698d89 | ||
|
5ceb093f3c | ||
|
698240b6e0 | ||
|
101d6a37ce | ||
|
4cff2a52c0 | ||
|
f906cbd851 | ||
|
0c0be69e42 | ||
|
97a397d5c8 | ||
|
54e89a2118 | ||
|
704324ca77 | ||
|
aef7f52f96 | ||
|
5035fe6f44 | ||
|
399c9f82c3 | ||
|
ff0d08cc8e | ||
|
7655007f8a | ||
|
1fc811e437 | ||
|
8d3f9362fb | ||
|
e30386ce4a | ||
|
d12f5c65e1 | ||
|
2b04cb38d9 | ||
|
0faa5ae455 | ||
|
3bf04735c1 | ||
|
f54bb0981f | ||
|
f41be396fa | ||
|
d31a5c3c48 | ||
|
314fecfcd4 | ||
|
497b5febc3 | ||
|
2276ae0c5b | ||
|
53f2eafd16 | ||
|
a26a50941b | ||
|
075712d940 | ||
|
c0bb30e12d | ||
|
99c129cb03 | ||
|
dd199a6893 | ||
|
d627c778e5 | ||
|
ee626fd36e | ||
|
315fecdf71 | ||
|
b198e131ee | ||
|
3e6b927cde | ||
|
fcc970db3f | ||
|
a96088de9c | ||
|
4195ca591c | ||
|
86f543cb72 | ||
|
9aae0e7886 | ||
|
8e09795c95 | ||
|
5bf9f05c4b | ||
|
8e340e456d | ||
|
61b834e52c | ||
|
11b8001308 | ||
|
1807f3e3d7 | ||
|
e03686f961 | ||
|
c5d3c2bd62 | ||
|
d838caadd5 | ||
|
086f03730b | ||
|
fea32de3d3 | ||
|
b41d316fd6 | ||
|
e37bf90da7 | ||
|
544f5f0b92 | ||
|
73f889b45b | ||
|
7b5f5e2ed0 | ||
|
cd4790dd4b | ||
|
0421b58c55 | ||
|
a7895a17bf | ||
|
925c354551 | ||
|
58ab762131 | ||
|
adb5ea9a30 | ||
|
5c48b75375 | ||
|
678efa72e1 | ||
|
01d4126cc8 | ||
|
bf12d18457 | ||
|
d4ee737e50 | ||
|
ee7f22e902 | ||
|
6a77ac656f | ||
|
2f7acfd87a | ||
|
cce287a1e7 | ||
|
aa74e1e8f5 | ||
|
3e0c8aa83b | ||
|
6d9b49d4bb | ||
|
aca08cf74d | ||
|
529a1d3b95 | ||
|
db7a525bc7 |
6
.gitignore
vendored
6
.gitignore
vendored
@@ -14,9 +14,9 @@ wa_output/
|
||||
doc/source/api/
|
||||
doc/source/extensions/
|
||||
MANIFEST
|
||||
wlauto/external/uiautomator/bin/
|
||||
wlauto/external/uiautomator/*.properties
|
||||
wlauto/external/uiautomator/build.xml
|
||||
wlauto/external/uiauto/bin/
|
||||
wlauto/external/uiauto/*.properties
|
||||
wlauto/external/uiauto/build.xml
|
||||
*.orig
|
||||
local.properties
|
||||
wlauto/external/revent/libs/
|
||||
|
@@ -46,6 +46,8 @@ documentation.
|
||||
Documentation
|
||||
=============
|
||||
|
||||
You can view pre-built HTML documentation `here <http://pythonhosted.org/wlauto/>`_.
|
||||
|
||||
Documentation in reStructuredText format may be found under ``doc/source``. To
|
||||
compile it into cross-linked HTML, make sure you have `Sphinx
|
||||
<http://sphinx-doc.org/install.html>`_ installed, and then ::
|
||||
|
@@ -6,6 +6,11 @@ distributed as part of WA releases.
|
||||
Scripts
|
||||
-------
|
||||
|
||||
:check_apk_versions: Compares WA workload versions with the versions listed in APK
|
||||
if there are any incistency it will highlight these. This
|
||||
requires all APK files to be present for workloads with
|
||||
versions.
|
||||
|
||||
:clean_install: Performs a clean install of WA from source. This will remove any
|
||||
existing WA install (regardless of whether it was made from
|
||||
source or through a tarball with pip).
|
||||
|
66
dev_scripts/check_apk_versions
Normal file
66
dev_scripts/check_apk_versions
Normal file
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
from distutils.version import StrictVersion
|
||||
|
||||
from wlauto.core.extension_loader import ExtensionLoader
|
||||
from wlauto.common.android.workload import ApkWorkload
|
||||
from wlauto.utils.android import ApkInfo
|
||||
|
||||
el = ExtensionLoader()
|
||||
|
||||
|
||||
class fake_config(object):
|
||||
def __init__(self, ext_loader):
|
||||
self.ext_loader = ext_loader
|
||||
self.get_extension = ext_loader.get_extension
|
||||
|
||||
|
||||
class fake_device(object):
|
||||
platform = "android"
|
||||
|
||||
config = fake_config(el)
|
||||
device = fake_device()
|
||||
|
||||
if "WA_USER_DIRECTORY" in os.environ:
|
||||
base_path = os.environ["WA_USER_DIRECTORY"]
|
||||
else:
|
||||
base_path = "~/.workload_automation/dependencies/"
|
||||
|
||||
apk_workloads = [e for e in el.list_workloads()
|
||||
if issubclass(el.get_extension_class(e.name), ApkWorkload)]
|
||||
|
||||
for wl in apk_workloads:
|
||||
# Get versions from workloads
|
||||
workload_versions = []
|
||||
for p in wl.parameters:
|
||||
if p.name == "version" and p.allowed_values:
|
||||
workload_versions = p.allowed_values
|
||||
break
|
||||
else:
|
||||
continue
|
||||
|
||||
dep_path = os.path.join(os.path.expanduser(base_path), wl.name)
|
||||
apks = [apk for apk in os.listdir(dep_path) if apk.endswith(".apk")]
|
||||
|
||||
# Get versions from APK files
|
||||
apk_versions = []
|
||||
for apk in apks:
|
||||
# skip antutu 3d benchmark apk
|
||||
if apk == "com.antutu.benchmark.full-1.apk":
|
||||
continue
|
||||
apk_versions.append(ApkInfo(os.path.join(dep_path, apk)).version_name)
|
||||
|
||||
# Output workload info
|
||||
print "Workload: {}".format(wl.name)
|
||||
print "Workload Versions: {}".format(sorted(workload_versions, key=StrictVersion))
|
||||
print "APK versions: {}".format(sorted(apk_versions, key=StrictVersion))
|
||||
|
||||
# Check for bad/missing versions
|
||||
error = False
|
||||
for v in apk_versions:
|
||||
if v not in workload_versions:
|
||||
msg = "APK version '{}' not present in workload list of versions"
|
||||
print msg.format(v)
|
||||
error = True
|
||||
if not error:
|
||||
print "OK"
|
@@ -1,13 +1,82 @@
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import logging
|
||||
import subprocess
|
||||
import argparse
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
from wlauto.exceptions import WAError
|
||||
from wlauto.utils.misc import write_table
|
||||
from distmanagement.apk import get_aapt_path, get_apk_versions
|
||||
from wlauto.exceptions import WAError, ToolError
|
||||
from wlauto.utils.doc import format_simple_table
|
||||
|
||||
|
||||
def get_aapt_path():
|
||||
"""Return the full path to aapt tool."""
|
||||
sdk_path = os.getenv('ANDROID_HOME')
|
||||
if not sdk_path:
|
||||
raise ToolError('Please make sure you have Android SDK installed and have ANDROID_HOME set.')
|
||||
build_tools_directory = os.path.join(sdk_path, 'build-tools')
|
||||
versions = os.listdir(build_tools_directory)
|
||||
for version in reversed(sorted(versions)):
|
||||
aapt_path = os.path.join(build_tools_directory, version, 'aapt')
|
||||
if os.path.isfile(aapt_path):
|
||||
logging.debug('Found aapt for version {}'.format(version))
|
||||
return aapt_path
|
||||
else:
|
||||
raise ToolError('aapt not found. Please make sure at least one Android platform is installed.')
|
||||
|
||||
|
||||
def get_apks(path):
|
||||
"""Return a list of paths to all APK files found under the specified directory."""
|
||||
apks = []
|
||||
for root, dirs, files in os.walk(path):
|
||||
for file in files:
|
||||
_, ext = os.path.splitext(file)
|
||||
if ext.lower() == '.apk':
|
||||
apks.append(os.path.join(root, file))
|
||||
return apks
|
||||
|
||||
|
||||
class ApkVersionInfo(object):
|
||||
|
||||
def __init__(self, workload=None, package=None, label=None, version_name=None, version_code=None):
|
||||
self.workload = workload
|
||||
self.package = package
|
||||
self.label = label
|
||||
self.version_name = version_name
|
||||
self.version_code = version_code
|
||||
|
||||
def to_tuple(self):
|
||||
return (self.workload, self.package, self.label, self.version_name, self.version_code)
|
||||
|
||||
|
||||
version_regex = re.compile(r"name='(?P<name>[^']+)' versionCode='(?P<vcode>[^']+)' versionName='(?P<vname>[^']+)'")
|
||||
|
||||
|
||||
def extract_version_info(apk_path, aapt):
|
||||
command = [aapt, 'dump', 'badging', apk_path]
|
||||
output = subprocess.check_output(command)
|
||||
version_info = ApkVersionInfo(workload=apk_path.split(os.sep)[-2])
|
||||
for line in output.split('\n'):
|
||||
if line.startswith('application-label:'):
|
||||
version_info.label = line.split(':')[1].strip().replace('\'', '')
|
||||
elif line.startswith('package:'):
|
||||
match = version_regex.search(line)
|
||||
if match:
|
||||
version_info.package = match.group('name')
|
||||
version_info.version_code = match.group('vcode')
|
||||
version_info.version_name = match.group('vname')
|
||||
else:
|
||||
pass # not interested
|
||||
return version_info
|
||||
|
||||
|
||||
def get_apk_versions(path, aapt):
|
||||
apks = get_apks(path)
|
||||
versions = [extract_version_info(apk, aapt) for apk in apks]
|
||||
return versions
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
@@ -18,8 +87,10 @@ if __name__ == '__main__':
|
||||
args = parser.parse_args()
|
||||
|
||||
versions = get_apk_versions(args.path, aapt)
|
||||
write_table([v.to_tuple() for v in versions], sys.stdout,
|
||||
align='<<<>>', headers=['path', 'package', 'name', 'version code', 'version name'])
|
||||
table = format_simple_table([v.to_tuple() for v in versions],
|
||||
align='<<<>>',
|
||||
headers=['workload', 'package', 'name', 'version code', 'version name'])
|
||||
print table
|
||||
except WAError, e:
|
||||
logging.error(e)
|
||||
sys.exit(1)
|
||||
|
@@ -1,8 +1,13 @@
|
||||
#!/bin/bash
|
||||
|
||||
DEFAULT_DIRS=(
|
||||
wlauto
|
||||
wlauto/external/daq_server/src/daqpower
|
||||
)
|
||||
|
||||
EXCLUDE=wlauto/external/,wlauto/tests
|
||||
EXCLUDE_COMMA=wlauto/core/bootstrap.py,wlauto/workloads/geekbench/__init__.py
|
||||
IGNORE=E501,E265,E266,W391
|
||||
IGNORE=E501,E265,E266,W391,E401,E402,E731
|
||||
|
||||
if ! hash pep8 2>/dev/null; then
|
||||
echo "pep8 not found in PATH"
|
||||
@@ -13,7 +18,9 @@ fi
|
||||
if [[ "$1" == "" ]]; then
|
||||
THIS_DIR="`dirname \"$0\"`"
|
||||
pushd $THIS_DIR/.. > /dev/null
|
||||
pep8 --exclude=$EXCLUDE,$EXCLUDE_COMMA --ignore=$IGNORE wlauto
|
||||
for dir in "${DEFAULT_DIRS[@]}"; do
|
||||
pep8 --exclude=$EXCLUDE,$EXCLUDE_COMMA --ignore=$IGNORE $dir
|
||||
done
|
||||
pep8 --exclude=$EXCLUDE --ignore=$IGNORE,E241 $(echo "$EXCLUDE_COMMA" | sed 's/,/ /g')
|
||||
popd > /dev/null
|
||||
else
|
||||
|
@@ -1,5 +1,10 @@
|
||||
#!/bin/bash
|
||||
|
||||
DEFAULT_DIRS=(
|
||||
wlauto
|
||||
wlauto/external/daq_server/src/daqpower
|
||||
)
|
||||
|
||||
target=$1
|
||||
|
||||
compare_versions() {
|
||||
@@ -30,17 +35,19 @@ compare_versions() {
|
||||
}
|
||||
|
||||
pylint_version=$(python -c 'from pylint.__pkginfo__ import version; print version')
|
||||
compare_versions $pylint_version "1.3.0"
|
||||
compare_versions $pylint_version "1.5.1"
|
||||
result=$?
|
||||
if [ "$result" == "2" ]; then
|
||||
echo "ERROR: pylint version must be at least 1.3.0; found $pylint_version"
|
||||
echo "ERROR: pylint version must be at least 1.5.1; found $pylint_version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
THIS_DIR="`dirname \"$0\"`"
|
||||
if [[ "$target" == "" ]]; then
|
||||
pushd $THIS_DIR/.. > /dev/null
|
||||
pylint --rcfile extras/pylintrc wlauto
|
||||
for dir in "${DEFAULT_DIRS[@]}"; do
|
||||
pylint --rcfile extras/pylintrc $dir
|
||||
done
|
||||
popd > /dev/null
|
||||
else
|
||||
pylint --rcfile $THIS_DIR/../extras/pylintrc $target
|
||||
|
@@ -6,10 +6,10 @@ Modules
|
||||
|
||||
Modules are essentially plug-ins for Extensions. They provide a way of defining
|
||||
common and reusable functionality. An Extension can load zero or more modules
|
||||
during it's creation. Loaded modules will then add their capabilities (see
|
||||
during its creation. Loaded modules will then add their capabilities (see
|
||||
Capabilities_) to those of the Extension. When calling code tries to access an
|
||||
attribute of an Extension the Extension doesn't have, it will try to find the
|
||||
attribute among it's loaded modules and will return that instead.
|
||||
attribute among its loaded modules and will return that instead.
|
||||
|
||||
.. note:: Modules are themselves extensions, and can therefore load their own
|
||||
modules. *Do not* abuse this.
|
||||
|
57
doc/source/apk_workloads.rst
Normal file
57
doc/source/apk_workloads.rst
Normal file
@@ -0,0 +1,57 @@
|
||||
.. _apk_workload_settings:
|
||||
|
||||
APK Workloads
|
||||
=============
|
||||
|
||||
APK resolution
|
||||
--------------
|
||||
|
||||
WA has various resource getters that can be configured to locate APK files but for most people APK files
|
||||
should be kept in the ``$WA_HOME/dependencies/SOME_WORKLOAD/`` directory. (by default
|
||||
``~/.workload_automation/dependencies/SOME_WORKLOAD/``). The ``WA_HOME`` enviroment variable can be used
|
||||
to chnage the location of this folder. The APK files need to be put into the corresponding directories
|
||||
for the workload they belong to. The name of the file can be anything but as explained below may need
|
||||
to contain certain peices of information.
|
||||
|
||||
All ApkWorkloads have parameters that affect the way in which APK files are resolved, ``exact_abi``,
|
||||
``force_install`` and ``check_apk``. Their exact behaviours are outlined below.
|
||||
|
||||
.. confval:: exact_abi
|
||||
|
||||
If this setting is enabled WA's resource resolvers will look for the devices ABI with any native
|
||||
code present in the apk. By default this setting is disabled since most apks will work across all
|
||||
devices. You may wish to enable this feature when working with devices that support multiple ABI's (like
|
||||
64-bit devices that can run 32-bit APK files) and are specifically trying to test one or the other.
|
||||
|
||||
.. confval:: force_install
|
||||
|
||||
If this setting is enabled WA will *always* use the APK file on the host, and re-install it on every
|
||||
iteration. If there is no APK on the host that is a suitable version and/or ABI for the workload WA
|
||||
will error when ``force_install`` is enabled.
|
||||
|
||||
.. confval:: check_apk
|
||||
|
||||
This parameter is used to specify a preference over host or target versions of the app. When set to
|
||||
``True`` WA will prefer the host side version of the APK. It will check if the host has the APK and
|
||||
if the host APK meets the version requirements of the workload. If does and the target already has
|
||||
same version nothing will be done, other wise it will overwrite the targets app with the host version.
|
||||
If the hosts is missing the APK or it does not meet version requirements WA will fall back to the app
|
||||
on the target if it has the app and it is of a suitable version. When this parameter is set to
|
||||
``false`` WA will prefer to use the version already on the target if it meets the workloads version
|
||||
requirements. If it does not it will fall back to search the host for the correct version. In both modes
|
||||
if neither the host nor target have a suitable version, WA will error and not run the workload.
|
||||
|
||||
Some workloads will also feature the follow parameters which will alter the way their APK files are resolved.
|
||||
|
||||
.. confval:: version
|
||||
|
||||
This parameter is used to specify which version of uiautomation for the workload is used. In some workloads
|
||||
e.g. ``geekbench`` multiple versions with drastically different UI's are supported. When a workload uses a
|
||||
version it is required for the APK file to contain the uiautomation version in the file name. In the case
|
||||
of antutu the file names could be: ``geekbench_2.apk`` or ``geekbench_3.apk``.
|
||||
|
||||
.. confval:: variant_name
|
||||
|
||||
Some workloads use variants of APK files, this is usually the case with web browser APK files, these work
|
||||
in exactly the same way as the version, the variant of the apk
|
||||
|
@@ -1,6 +1,846 @@
|
||||
=================================
|
||||
What's New in Workload Automation
|
||||
=================================
|
||||
-------------
|
||||
Version 2.6.0
|
||||
-------------
|
||||
|
||||
.. note:: Users who are currently using the GitHub master version of WA should
|
||||
uninstall the existing version before upgrading to avoid potential issues.
|
||||
|
||||
Additions:
|
||||
##########
|
||||
|
||||
Workloads
|
||||
~~~~~~~~~
|
||||
- ``AdobeReader``: A workload that carries out following typical productivity
|
||||
tasks. These include opening a file, performing various gestures and
|
||||
zooms on screen and searching for a predefined set of strings.
|
||||
- ``octaned8``: A workload to run the binary (non-browser) version of the JS
|
||||
benchmark Octane.
|
||||
- ``GooglePlayBooks``: A workload to perform standard productivity tasks with
|
||||
Google Play Books. This workload performs various tasks, such as searching
|
||||
for a book title online, browsing through a book, adding and removing notes,
|
||||
word searching, and querying information about the book.
|
||||
- ``GooglePhotos``: A workload to perform standard productivity tasks with
|
||||
Google Photos. Carries out various tasks, such as browsing images,
|
||||
performing zooms, and post-processing the image.
|
||||
- ``GoogleSlides``: Carries out various tasks, such as creating a new
|
||||
presentation, adding text, images, and shapes, as well as basic editing and
|
||||
playing a slideshow.
|
||||
- ``Youtube``: The workload plays a video, determined by the ``video_source``
|
||||
parameter. While the video is playing, some common actions such as video
|
||||
seeking, pausing playback and navigating the comments section are performed.
|
||||
- ``Skype``: Replacement for the ``skypevideo`` workload. Logs into Skype
|
||||
and initiates a voice or video call with a contact.
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``AndroidUxPerfWorkload``: Added a new workload class to encapsulate
|
||||
functionality common to all uxperf workloads.
|
||||
- ``UxPerfUiAutomation``: Added class which contains methods specific to
|
||||
UX performance
|
||||
testing.
|
||||
- ``get-assets``: Added new script and command to retrieve external assets
|
||||
for workloads
|
||||
|
||||
Results Processors
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
- ``uxperf``: Parses device logcat for `UX_PERF` markers to produce performance
|
||||
metrics for workload actions using specified instrumentation.
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- ``State Detection``: Added feature to use visual state detection to
|
||||
verify the state of a workload after setup and run.
|
||||
|
||||
|
||||
Fixes/Improvements:
|
||||
###################
|
||||
|
||||
Documentation
|
||||
~~~~~~~~~~~~~~
|
||||
- ``Revent``: Added file structure to the documentation.
|
||||
- Clarified documentation regarding binary dependencies.
|
||||
- Updated documentation with ``create`` and ``get-assets`` commands.
|
||||
|
||||
Instruments
|
||||
~~~~~~~~~~~~
|
||||
- ``sysfs_extractor``: Fixed error when `tar.gz` file already existed on device,
|
||||
now overwrites.
|
||||
- ``cpufreq``: Fixed error when `tar.gz` file already existed on device, now
|
||||
overwrites.
|
||||
- ``file-poller``:
|
||||
- Improved csv output.
|
||||
- Added error checking and reporting.
|
||||
- Changed ``files`` to be a mandatory parameter.
|
||||
- ``fps``:
|
||||
- Added a new parameter to fps instrument to specify the time period between
|
||||
calls to ``dumpsys SurfaceFlinger --latency`` when collecting frame data.
|
||||
- Added gfxinfo methods to obtain fps stats. Auto detects and uses appropriate
|
||||
method via android version of device.
|
||||
- Fixed issue with regex.
|
||||
- Now handles empty frames correctly.
|
||||
- ``energy_model``: Ensures that the ``ui`` runtime parameter is only set for
|
||||
ChromeOS devices.
|
||||
- ``ftrace``: Added support to handle traces collected by both WA and devlib.
|
||||
- ``Perf``: Updated 32bit binary file for little endian devices.
|
||||
|
||||
Resource Getters
|
||||
~~~~~~~~~~~~~~~~
|
||||
- ``http_getter``: Now used to try and find executables files from a
|
||||
provided ``remote_assets_url``.
|
||||
|
||||
Result Processors
|
||||
~~~~~~~~~~~~~~~~~
|
||||
- ``cpu_states``: Fixes using stand-alone script with timeline option.
|
||||
|
||||
Workloads
|
||||
~~~~~~~~~
|
||||
- ``antutu``: Fixed setting permissions of ``FINE_LOCATION`` on some devices.
|
||||
- ``bbench`` Fixed handling of missing results.
|
||||
- ``camerarecord``:
|
||||
- Added frame stats collection through dumpsys gfxinfo.
|
||||
- Added possibility to select slow_motion recording mode.
|
||||
- ``Geekbench``:
|
||||
- Fixed output file listing causing pull failure.
|
||||
- Added support for Geekbench 4.
|
||||
- ``recentfling``:
|
||||
- Fixed issue when binaries were not uninstalled correctly.
|
||||
- Scripts are now deployed via ``install()`` to ensure they are executable.
|
||||
- Fixed handling of when a PID file is deleted before reaching processing
|
||||
results stage.
|
||||
- Added parameter to not start any apps before flinging.
|
||||
- ``rt-app``: Added camera recorder simulation.
|
||||
- ``sysbench``: Added arm64 binary.
|
||||
- ``Vellamo``: Fixed capitalization in part of UIAutomation to prevent
|
||||
potential issues.
|
||||
- ``Spec2000``: Now uses WA deployed version of busybox.
|
||||
- ``NetStat``: Updated to support new default logcat format in Android 6.
|
||||
- ``Dex2oat``: Now uses root if available.
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``adb_shell``:
|
||||
- Fixed issue when using single quoted command with ``adb_shell``.
|
||||
- Correctly forwards stderr to the caller for newer versions of adb.
|
||||
- ``revent``
|
||||
- Added ``-S`` argument to "record" command to automatically record a
|
||||
screen capture after a recording is completed.
|
||||
- Fixed issue with multiple iterations of a revent workload.
|
||||
- Added ``-s`` option to executable to allow waiting on stdin.
|
||||
- Removed timeout in command as ``-s`` is specified.
|
||||
- Revent recordings can now be parsed and used within WA.
|
||||
- Fixed issue when some recordings wouldn't be retrieved correctly.
|
||||
- Timeout is now based on recording duration.
|
||||
- Added `magic` and file version to revent files. Revent files should now
|
||||
start with ``REVENT`` followed by the file format version.
|
||||
- Added support for gamepad recording. This type of recording contains
|
||||
only the events from a gamepad device (which is automatically
|
||||
identified).
|
||||
- A ``mode`` field has been added to the recording format to help
|
||||
distinguish between the normal and gamepad recording types.
|
||||
- Added ``-g`` option to ``record`` command to expose the gamepad recording
|
||||
mode.
|
||||
- The structure of revent code has undergone a major overhaul to improve
|
||||
maintainability and robustness.
|
||||
- More detailed ``info`` command output.
|
||||
- Updated Makefile to support debug/production builds.
|
||||
- ``Android API``: Upgraded Android API level from 17 to 18.
|
||||
- ``uiautomator``: The window hierarchy is now dumped to a file when WA fails
|
||||
on android devices.
|
||||
- ``AndroidDevice``:
|
||||
- Added support for downgrading when installing an APK.
|
||||
- Added a ``broadcast_media_mounted`` method to force a re-index of the
|
||||
mediaserver cache for a specified directory.
|
||||
- Now correctly handles ``None`` output for ``get_pids_of()`` when there are no
|
||||
running processes with the specified name.
|
||||
- Renamed the capture method from ``capture_view_hierachy`` to
|
||||
``capture_ui_hierarchy``.
|
||||
- Changed the file extension of the capture file to ``.uix``
|
||||
- Added ``-rf`` to delete_files to be consistent with ``LinuxDevice``.
|
||||
- ``LinuxDevice``: Now ensures output from both stdout and stderr is propagated in
|
||||
the event of a DeviceError.
|
||||
- ``APKWorkload``:
|
||||
- Now ensure APKs are replaced properly when reinstalling.
|
||||
- Now checks APK version and ABI when installing.
|
||||
- Fixed error on some devices when trying to grant permissions that were
|
||||
already granted.
|
||||
- Fixed some permissions not being granted.
|
||||
- Now allows disabling the main activity launch in setup (required for some
|
||||
apps).
|
||||
- Added parameter to clear data on reset (default behaviour unchanged).
|
||||
- Ignores exception for non-fatal permission grant failure.
|
||||
- Fixed issue of multiple versions of the same workload failing to find their APK.
|
||||
- Added method to ensure a valid apk version is used within a workload.
|
||||
- Updated how APK resolution is performed to maximise likelihood of
|
||||
a workload running.
|
||||
- When ``check_apk`` is ``True`` will prefer host APK and if no suitable APK
|
||||
is found, will use target APK if the correct version is present. When ``False``
|
||||
will prefer target apk if it is a valid version otherwise will fallback to
|
||||
host APK.
|
||||
- ``RunConfiguration``: Fixed disabling of instruments in workload specs.
|
||||
- ``Devices``:
|
||||
- Added network connectivity check for devices.
|
||||
- Subclasses can now set ``requires_network`` to ``True`` and network
|
||||
connectivity check will be performed during ``setup()``.
|
||||
- ``Workloads``:
|
||||
- Added network check methods.
|
||||
- Fixed versions to be backwards compatible.
|
||||
- Updated workload versions to match APK files.
|
||||
- Fixed issues with calling super.
|
||||
- ``Assets``: Added script to retrieve external assets for workloads.
|
||||
- ``Execution``: Added a ``clean_up`` global config option to delete WA files from
|
||||
devices.
|
||||
- ``Runner``: No longer takes a screenshot or dump of UI hierarchy for some errors when
|
||||
unnecessary, e.g. host errors.
|
||||
- ``core``: Constraints and allowed values are now checked when set instead of
|
||||
when validating.
|
||||
- ``FpsProcessor``:
|
||||
- Added requirement on ``filtered_vsyncs_to_compose`` for ``total_vsync metric``.
|
||||
- Removed misleading comment in class description.
|
||||
- ``BaseUiAutomation``: Added new Marker API so workloads generate start and end
|
||||
markers with a string name.
|
||||
- ``AndroidUiAutoBenchmark``: Automatically checks for known package versions
|
||||
that don't work well with AndroidUiAutoBenchmark workloads.
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Updated setup.py url to be a valid URI.
|
||||
- Fixed workload name in big.Little sample agenda.
|
||||
|
||||
Incompatible changes
|
||||
####################
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``check_abi``: Now renamed to ``exact_abi``, is used to ensure that if enabled,
|
||||
only an apk containing no native code or code designed for the devices primary
|
||||
ABI is used.
|
||||
- ``AndroidDevice``: Renamed ``supported_eabis`` property to ``supported_abis``
|
||||
to be consistent with linux devices.
|
||||
|
||||
Workloads
|
||||
~~~~~~~~~~
|
||||
- ``skypevideo``: Workload removed and replaced with ``skype`` workload.
|
||||
|
||||
-------------
|
||||
Version 2.5.0
|
||||
-------------
|
||||
|
||||
Additions:
|
||||
##########
|
||||
|
||||
Instruments
|
||||
~~~~~~~~~~~
|
||||
- ``servo_power``: Added support for chromebook servo boards.
|
||||
- ``file_poller``: polls files and outputs a CSV of their values over time.
|
||||
- ``systrace``: The Systrace tool helps analyze the performance of your
|
||||
application by capturing and displaying execution times of your applications
|
||||
processes and other Android system processes.
|
||||
|
||||
Workloads
|
||||
~~~~~~~~~
|
||||
- ``blogbench``: Blogbench is a portable filesystem benchmark that tries to
|
||||
reproduce the load of a real-world busy file server.
|
||||
- ``stress-ng``: Designed to exercise various physical subsystems of a computer
|
||||
as well as the various operating system kernel interfaces.
|
||||
- ``hwuitest``: Uses hwuitest from AOSP to test rendering latency on Android
|
||||
devices.
|
||||
- ``recentfling``: Tests UI jank on android devices.
|
||||
- ``apklaunch``: installs and runs an arbitrary apk file.
|
||||
- ``googlemap``: Launches Google Maps and replays previously recorded
|
||||
interactions.
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``wlauto.utils.misc``: Added ``memoised`` function decorator that allows
|
||||
caching of previous function/method call results.
|
||||
- Added new ``Device`` APIs:
|
||||
- ``lsmod``: lists kernel modules
|
||||
- ``insmod``: inserts a kernel module from a ``.ko`` file on the host.
|
||||
- ``get_binary_path``: Checks ``binary_directory`` for the wanted binary,
|
||||
if it is not found there it will try to use ``which``
|
||||
- ``install_if_needed``: Will only install a binary if it is not already
|
||||
on the target.
|
||||
- ``get_device_model``: Gets the model of the device.
|
||||
- ``wlauto.core.execution.ExecutionContext``:
|
||||
- ``add_classfiers``: Allows adding a classfier to all metrics for the
|
||||
current result.
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Commands:
|
||||
- ``record``: Simplifies recording revent files.
|
||||
- ``replay``: Plays back revent files.
|
||||
|
||||
Fixes/Improvements:
|
||||
###################
|
||||
|
||||
Devices
|
||||
~~~~~~~
|
||||
- ``juno``:
|
||||
- Fixed ``bootargs`` parameter not being passed to ``_boot_via_uboot``.
|
||||
- Removed default ``bootargs``
|
||||
- ``gem5_linux``:
|
||||
- Added ``login_prompt`` and ``login_password_prompt`` parameters.
|
||||
- ``generic_linux``: ABI is now read from the target device.
|
||||
|
||||
Instruments
|
||||
~~~~~~~~~~~
|
||||
- ``trace-cmd``:
|
||||
- Added the ability to report the binary trace on the target device,
|
||||
removing the need for ``trace-cmd`` binary to be present on the host.
|
||||
- Updated to handle messages that the trace for a CPU is empty.
|
||||
- Made timeout for pulling trace 1 minute at minimum.
|
||||
- ``perf``: per-cpu statistics now get added as metrics to the results (with a
|
||||
classifier used to identify the cpu).
|
||||
- ``daq``:
|
||||
- Fixed bug where an exception would be raised if ``merge_channels=False``
|
||||
- No longer allows duplicate channel labels
|
||||
- ``juno_energy``:
|
||||
- Summary metrics are now calculated from the contents of ``energy.csv`` and
|
||||
added to the overall results.
|
||||
- Added a ``strict`` parameter. When this is set to ``False`` the device
|
||||
check during validation is omitted.
|
||||
- ``sysfs_extractor``: tar and gzip are now performed separately to solve
|
||||
permission issues.
|
||||
- ``fps``:
|
||||
- Now only checks for crashed content if ``crash_check`` is ``True``.
|
||||
- Can now process multiple ``view`` attributes.
|
||||
- ``hwmon``: Sensor naming fixed, they are also now added as result classifiers
|
||||
|
||||
Resource Getters
|
||||
~~~~~~~~~~~~~~~~
|
||||
- ``extension_asset``: Now picks up the path to the mounted filer from the
|
||||
``remote_assets_path`` global setting.
|
||||
|
||||
Result Processors
|
||||
~~~~~~~~~~~~~~~~~
|
||||
- ``cpustates``:
|
||||
- Added the ability to configure how a missing ``START`` marker in the trace
|
||||
is handled.
|
||||
- Now raises a warning when there is a ``START`` marker in the trace but no
|
||||
``STOP`` marker.
|
||||
- Exceptions in PowerStateProcessor no longer stop the processing of the
|
||||
rest of the trace.
|
||||
- Now ensures a known initial state by nudging each CPU to bring it out of
|
||||
idle and writing starting CPU frequencies to the trace.
|
||||
- Added the ability to create a CPU utilisation timeline.
|
||||
- Fixed issues with getting frequencies of hotplugged CPUs
|
||||
- ``csv``: Zero-value classifiers are no longer converted to an empty entry.
|
||||
- ``ipynb_exporter``: Default template no longer shows a blank plot for
|
||||
workloads without ``summary_metrics``
|
||||
|
||||
Workloads
|
||||
~~~~~~~~~
|
||||
- ``vellamo``:
|
||||
- Added support for v3.2.4.
|
||||
- Fixed getting values from logcat.
|
||||
- ``cameracapture``: Updated to work with Android M+.
|
||||
- ``camerarecord``: Updated to work with Android M+.
|
||||
- ``lmbench``:
|
||||
- Added the output file as an artifact.
|
||||
- Added taskset support
|
||||
- ``antutu`` - Added support for v6.0.1
|
||||
- ``ebizzy``: Fixed use of ``os.path`` to ``self.device.path``.
|
||||
- ``bbench``: Fixed browser crashes & permissions issues on android M+.
|
||||
- ``geekbench``:
|
||||
- Added check whether device is rooted.
|
||||
- ``manual``: Now only uses logcat on Android devices.
|
||||
- ``applaunch``:
|
||||
- Fixed ``cleanup`` not getting forwarded to script.
|
||||
- Added the ability to stress IO during app launch.
|
||||
- ``dhrystone``: Now uses WA's resource resolution to find its binary so it
|
||||
uses the correct ABI.
|
||||
- ``glbench``: Updated for new logcat formatting.
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``ReventWorkload``:
|
||||
- Now kills all revent instances on teardown.
|
||||
- Device model name is now used when searching for revent files, falling back
|
||||
to WA device name.
|
||||
- ``BaseLinuxDevice``:
|
||||
- ``killall`` will now run as root by default if the device
|
||||
is rooted.
|
||||
- ``list_file_systems`` now handles blank lines.
|
||||
- All binaries are now installed into ``binaries_directory`` this allows..
|
||||
- Busybox is now deployed on non-root devices.
|
||||
- gzipped property files are now zcat'ed
|
||||
- ``LinuxDevice``:
|
||||
- ``kick_off`` no longer requires root.
|
||||
- ``kick_off`` will now run as root by default if the device is rooted.
|
||||
- No longer raises an exception if a connection was dropped during a reboot.
|
||||
- Added a delay before polling for a connection to avoid re-connecting to a
|
||||
device that is still in the process of rebooting.
|
||||
- ``wlauto.utils.types``: ``list_or_string`` now ensures that elements of a list
|
||||
are strings.
|
||||
- ``AndroidDevice``:
|
||||
- ``kick_off`` no longer requires root.
|
||||
- Build props are now gathered via ``getprop`` rather than trying to parse
|
||||
build.prop directly.
|
||||
- WA now pushes its own ``sqlite3`` binary.
|
||||
- Now uses ``content`` instead of ``settings`` to get ``ANDROID_ID``
|
||||
- ``swipe_to_unlock`` parameter is now actually used. It has been changed to
|
||||
take a direction to accommodate various devices.
|
||||
- ``ensure_screen_is_on`` will now also unlock the screen if swipe_to_unlock
|
||||
is set.
|
||||
- Fixed use of variables in as_root=True commands.
|
||||
- ``get_pids_of`` now used ``busybox grep`` since as of Android M+ ps cannot
|
||||
filter by process name anymore.
|
||||
- Fixed installing APK files with whitespace in their path/name.
|
||||
- ``adb_shell``:
|
||||
- Fixed handling of line breaks at the end of command output.
|
||||
- Newline separator is now detected from the target.
|
||||
- As of ADB v1.0.35, ADB returns the return code of the command run. WA now
|
||||
handles this correctly.
|
||||
- ``ApkWorkload``:
|
||||
- Now attempts to grant all runtime permissions for devices on Android M+.
|
||||
- Can now launch packages that don't have a launch activity defined.
|
||||
- Package version is now added to results as a classifier.
|
||||
- Now clears app data if an uninstall failed to ensure it starts from a known
|
||||
state.
|
||||
- ``wlauto.utils.ipython``: Updated to work with ipython v5.
|
||||
- ``Gem5Device``:
|
||||
- Added support for deploying the ``m5`` binary.
|
||||
- No longer waits for the boot animation to finish if it has been disabled.
|
||||
- Fixed runtime error caused by lack of kwargs.
|
||||
- No longer depends on ``busybox``.
|
||||
- Split out commands to resize shell to ``resize_shell``.
|
||||
- Now tries to connect to the shell up to 10 times.
|
||||
- No longer renames gzipped files.
|
||||
- Agendas:
|
||||
- Now errors when an agenda key is empty.
|
||||
- ``wlauto.core.execution.RunInfo``: ``run_name`` will now default to
|
||||
``{output_folder}_{date}_{time}``.
|
||||
- Extensions:
|
||||
- Two different parameters can now have the same global alias as long as they
|
||||
their types match.
|
||||
- You can no longer ``override`` parameters that are defined at the same
|
||||
level.
|
||||
- ``wlauto.core.entry_point``: Now gives a better error when a config file
|
||||
doesn't exist.
|
||||
- ``wlauto.utils.misc``: Added ``aarch64`` to list for arm64 ABI.
|
||||
- ``wlauto.core.resolver``: Now shows what version was being search for when a
|
||||
resource is not found.
|
||||
- Will no longer start instruments etc. if a run has no workload specs.
|
||||
- ``wlauto.utils.uboot``: Now detects uboot version to use correct line endings.
|
||||
- ``wlauto.utils.trace_cmd``: Added a parser for sched_switch events.
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- Updated to pylint v1.5.1
|
||||
- Rebuilt ``busybox`` binaries to prefer built-in applets over system binaries.
|
||||
- ``BaseUiAutomation``: Added functions for checking version strings.
|
||||
|
||||
Incompatible changes
|
||||
####################
|
||||
|
||||
Instruments
|
||||
~~~~~~~~~~~
|
||||
- ``apk_version``: Removed, use result classifiers instead.
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``BaseLinuxDevice``: Removed ``is_installed`` use ``install_if_needed`` and
|
||||
``get_binary_path`` instead.
|
||||
- ``LinuxDevice``: Removed ``has_root`` method, use ``is_rooted`` instead.
|
||||
- ``AndroidDevice``: ``swipe_to_unlock`` method replaced with
|
||||
``perform_unlock_swipe``.
|
||||
|
||||
-------------
|
||||
Version 2.4.0
|
||||
-------------
|
||||
|
||||
Additions:
|
||||
##########
|
||||
|
||||
Devices
|
||||
~~~~~~~~
|
||||
- ``gem5_linux`` and ``gem5_android``: Interfaces for Gem5 simulation
|
||||
environment running Linux and Android respectively.
|
||||
- ``XE503C1211``: Interface for Samsung XE503C12 Chromebooks.
|
||||
- ``chromeos_test_image``: Chrome OS test image device. An off the shelf
|
||||
device will not work with this device interface.
|
||||
|
||||
Instruments
|
||||
~~~~~~~~~~~~
|
||||
- ``freq_sweep``: Allows "sweeping" workloads across multiple CPU frequencies.
|
||||
- ``screenon``: Ensures screen is on, before each iteration, or periodically
|
||||
on Android devices.
|
||||
- ``energy_model``: This instrument can be used to generate an energy model
|
||||
for a device based on collected power and performance measurements.
|
||||
- ``netstats``: Allows monitoring data sent/received by applications on an
|
||||
Android device.
|
||||
|
||||
Modules
|
||||
~~~~~~~
|
||||
- ``cgroups``: Allows query and manipulation of cgroups controllers on a Linux
|
||||
device. Currently, only cpusets controller is implemented.
|
||||
- ``cpuidle``: Implements cpuidle state discovery, query and manipulation for
|
||||
a Linux device. This replaces the more primitive get_cpuidle_states method
|
||||
of LinuxDevice.
|
||||
- ``cpufreq`` has now been split out into a device module
|
||||
|
||||
Resource Getters
|
||||
~~~~~~~~~~~~~~~~~
|
||||
- ``http_assets``: Downloads resources from a web server.
|
||||
|
||||
Results Processors
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
- ``ipynb_exporter``: Generates an IPython notebook from a template with the
|
||||
results and runs it.
|
||||
- ``notify``: Displays a desktop notification when a run finishes
|
||||
(Linux only).
|
||||
- ``cpustates``: Processes power ftrace to produce CPU state and parallelism
|
||||
stats. There is also a script to invoke this outside of WA.
|
||||
|
||||
Workloads
|
||||
~~~~~~~~~
|
||||
- ``telemetry``: Executes Google's Telemetry benchmarking framework
|
||||
- ``hackbench``: Hackbench runs tests on the Linux scheduler
|
||||
- ``ebizzy``: This workload resembles common web server application workloads.
|
||||
- ``power_loadtest``: Continuously cycles through a set of browser-based
|
||||
activities and monitors battery drain on a device (part of ChromeOS autotest
|
||||
suite).
|
||||
- ``rt-app``: Simulates configurable real-time periodic load.
|
||||
- ``linpack-cli``: Command line version of linpack benchmark.
|
||||
- ``lmbench``: A suite of portable ANSI/C microbenchmarks for UNIX/POSIX.
|
||||
- ``stream``: Measures memory bandwidth.
|
||||
- ``iozone``: Runs a series of disk I/O performance tests.
|
||||
- ``androbench``: Measures the storage performance of device.
|
||||
- ``autotest``: Executes tests from ChromeOS autotest suite.
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``wlauto.utils``:
|
||||
- Added ``trace_cmd``, a generic trace-cmd parser.
|
||||
- Added ``UbootMenu``, allows navigating Das U-boot menu over serial.
|
||||
- ``wlauto.utils.types``:
|
||||
- ``caseless_string``: Behaves exactly like a string, except this ignores
|
||||
case in comparisons. It does, however, preserve case.
|
||||
- ``list_of``: allows dynamic generation of type-safe list types based on
|
||||
an existing type.
|
||||
- ``arguments``: represents arguments that are passed on a command line to
|
||||
an application.
|
||||
- ``list-or``: allows dynamic generation of types that accept either a base
|
||||
type or a list of base type. Using this ``list_or_integer``,
|
||||
``list_or_number`` and ``list_or_bool`` were also added.
|
||||
- ``wlauto.core.configuration.WorkloadRunSpec``:
|
||||
- ``copy``: Allows making duplicates of ``WorkloadRunSpec``'s
|
||||
- ``wlauto.utils.misc``:
|
||||
- ``list_to_ranges`` and ``ranges_to_list``: convert between lists of
|
||||
integers and corresponding range strings, e.g. between [0,1,2,4] and
|
||||
'0-2,4'
|
||||
- ``list_to_mask`` and ``mask_to_list``: convert between lists of integers
|
||||
and corresponding integer masks, e.g. between [0,1,2,4] and 0x17
|
||||
- ``wlauto.instrumentation``:
|
||||
- ``instrument_is_enabled``: Returns whether or not an instrument is
|
||||
enabled for the current job.
|
||||
- ``wlauto.core.result``:
|
||||
- Added "classifiers" field to Metric objects. This is a dict mapping
|
||||
classifier names (arbitrary strings) to corresponding values for that
|
||||
specific metrics. This is to allow extensions to add extension-specific
|
||||
annotations to metric that could be handled in a generic way (e.g. by
|
||||
result processors). They can also be set in agendas.
|
||||
- Failed jobs will now be automatically retried
|
||||
- Implemented dynamic device modules that may be loaded automatically on
|
||||
device initialization if the device supports them.
|
||||
- Added support for YAML configs.
|
||||
- Added ``initialize`` and ``finalize`` methods to workloads.
|
||||
- ``wlauto.core.ExecutionContext``:
|
||||
- Added ``job_status`` property that returns the status of the currently
|
||||
running job.
|
||||
|
||||
Fixes/Improvements
|
||||
##################
|
||||
|
||||
Devices
|
||||
~~~~~~~~
|
||||
- ``tc2``: Workaround for buffer overrun when loading large initrd blob.
|
||||
- ``juno``:
|
||||
- UEFI config can now be specified as a parameter.
|
||||
- Adding support for U-Boot booting.
|
||||
- No longer auto-disconnects ADB at the end of a run.
|
||||
- Added ``actually_disconnect`` to restore old disconnect behaviour
|
||||
- Now passes ``video`` command line to Juno kernel to work around a known
|
||||
issue where HDMI loses sync with monitors.
|
||||
- Fixed flashing.
|
||||
|
||||
Instruments
|
||||
~~~~~~~~~~~
|
||||
- ``trace_cmd``:
|
||||
- Fixed ``buffer_size_file`` for non-Android devices
|
||||
- Reduce starting priority.
|
||||
- Now handles trace headers and thread names with spaces
|
||||
- ``energy_probe``: Added ``device_entry`` parameter.
|
||||
- ``hwmon``:
|
||||
- Sensor discovery is now done only at the start of a run.
|
||||
- Now prints both before/after and mean temperatures.
|
||||
- ``daq``:
|
||||
- Now reports energy
|
||||
- Fixed file descriptor leak
|
||||
- ``daq_power.csv`` now matches the order of labels (if specified).
|
||||
- Added ``gpio_sync``. When enabled, this wil cause the instrument to
|
||||
insert a marker into ftrace, while at the same time setting a GPIO pin
|
||||
high.
|
||||
- Added ``negative_values`` parameter. which can be used to specify how
|
||||
negative values in the samples should be handled.
|
||||
- Added ``merge_channels`` parameter. When set DAQ channel will be summed
|
||||
together.
|
||||
- Workload labels, rather than names, are now used in the "workload"
|
||||
column.
|
||||
- ``cpufreq``:
|
||||
- Fixes missing directories problem.
|
||||
- Refined the availability check not to rely on the top-level cpu/cpufreq
|
||||
directory
|
||||
- Now handles non-integer output in ``get_available_frequencies``.
|
||||
- ``sysfs_extractor``:
|
||||
- No longer raises an error when both device and host paths are empty.
|
||||
- Fixed pulled files verification.
|
||||
- ``perf``:
|
||||
- Updated binaries.
|
||||
- Added option to force install.
|
||||
- ``killall`` is now run as root on rooted Android devices.
|
||||
- ``fps``:
|
||||
- now generates detailed FPS traces as well as report average FPS.
|
||||
- Updated jank calculation to only count "large" janks.
|
||||
- Now filters out bogus ``actual-present`` times and ignore janks above
|
||||
``PAUSE_LATENCY``
|
||||
- ``delay``:
|
||||
- Added ``fixed_before_start`` parameter.
|
||||
- Changed existing ``*_between_specs`` and ``*_between_iterations``
|
||||
callbacks to be ``very_slow``
|
||||
- ``streamline``:
|
||||
- Added Linux support
|
||||
- ``gatord`` is now only started once at the start of the run.
|
||||
|
||||
modules
|
||||
~~~~~~~
|
||||
- ``flashing``:
|
||||
- Fixed vexpress flashing
|
||||
- Added an option to keep UEFI entry
|
||||
|
||||
Result Processors
|
||||
~~~~~~~~~~~~~~~~~
|
||||
- ``cpustate``:
|
||||
- Now generates a timeline csv as well as stats.
|
||||
- Adding ID to overall cpustate reports.
|
||||
- ``csv``: (partial) ``results.csv`` will now be written after each iteration
|
||||
rather than at the end of the run.
|
||||
|
||||
Workloads
|
||||
~~~~~~~~~
|
||||
- ``glb_corporate``: clears logcat to prevent getting results from previous
|
||||
run.
|
||||
- ``sysbench``:
|
||||
- Updated sysbench binary to a statically linked version
|
||||
- Added ``file_test_mode`` parameter - this is a mandatory argument if
|
||||
``test`` is ``"fileio"``.
|
||||
- Added ``cmd_params`` parameter to pass options directly to sysbench
|
||||
invocation.
|
||||
- Removed Android browser launch and shutdown from workload (now runs on
|
||||
both Linux and Android).
|
||||
- Now works with unrooted devices.
|
||||
- Added the ability to run based on time.
|
||||
- Added a parameter to taskset to specific core(s).
|
||||
- Added ``threads`` parameter to be consistent with dhrystone.
|
||||
- Fixed case where default ``timeout`` < ``max_time``.
|
||||
- ``Dhrystone``:
|
||||
- added ``taskset_mask`` parameter to allow pinning to specific cores.
|
||||
- Now kills any running instances during setup (also handles CTRL-C).
|
||||
- ``sysfs_extractor``: Added parameter to explicitly enable/disable tempfs
|
||||
caching.
|
||||
- ``antutu``:
|
||||
- Fixed multi-``times`` playback for v5.
|
||||
- Updated result parsing to handle Android M logcat output.
|
||||
- ``geekbench``: Increased timeout to cater for slower devices.
|
||||
- ``idle``: Now works on Linux devices.
|
||||
- ``manhattan``: Added ``run_timemout`` parameter.
|
||||
- ``bbench``: Now works when binaries_directory is not in path.
|
||||
- ``nenamark``: Made duration configurable.
|
||||
|
||||
Framework
|
||||
~~~~~~~~~~
|
||||
- ``BaseLinuxDevice``:
|
||||
- Now checks that at least one core is enabled on another cluster before
|
||||
attempting to set number of cores on a cluster to ``0``.
|
||||
- No longer uses ``sudo`` if already logged in as ``root``.
|
||||
- Now saves ``dumpsys window`` output to the ``__meta`` directory.
|
||||
- Now takes ``password_prompt`` as a parameter for devices with a non
|
||||
standard ``sudo`` password prompt.
|
||||
- No longer raises an error if ``keyfile`` or ``password`` are not
|
||||
provided when they are not necessary.
|
||||
- Added new cpufreq APIs:
|
||||
- ``core`` APIs take a core name as the parameter (e.g. "a15")
|
||||
- ``cluster`` APIs take a numeric cluster ID (eg. 0)
|
||||
- ``cpu`` APIs take a cpufreq cpu ID as a parameter.
|
||||
- ``set_cpu_frequency`` now has a ``exact`` parameter. When true (the
|
||||
default) it will produce an error when the specified frequency is not
|
||||
supported by the cpu, otherwise cpufreq will decide what to do.
|
||||
- Added ``{core}_frequency`` runtime parameter to set cluster frequency.
|
||||
- Added ``abi`` property.
|
||||
- ``get_properties`` moved from ``LinuxDevice``, meaning ``AndroidDevice``
|
||||
will try to pull the same files. Added more paths to pull by default
|
||||
too.
|
||||
- fixed ``list_file_systems`` for Android M and Linux devices.
|
||||
- Now sets ``core_clusters`` from ``core_names`` if not explicitly
|
||||
specified.
|
||||
- Added ``invoke`` method that allows invoking an executable on the device
|
||||
under controlled conditions (e.g. within a particular directory, or
|
||||
taskset to specific CPUs).
|
||||
- No longer attempts to ``get_sysfile_value()`` as root on unrooted
|
||||
devices.
|
||||
- ``LinuxDevice``:
|
||||
- Now creates ``binaries_directory`` path if it doesn't exist.
|
||||
- Fixed device reset
|
||||
- Fixed ``file_exists``
|
||||
- implemented ``get_pid_of()`` and ``ps()``. Existing implementation
|
||||
relied on Android version of ps.
|
||||
- ``listdir`` will now return an empty list for an empty directory
|
||||
instead of a list containing a single empty string.
|
||||
- ``AndroidDevice``:
|
||||
- Executable (un)installation now works on unrooted devices.
|
||||
- Now takes into account ``binaries_directory`` when setting up busybox path.
|
||||
- update ``android_prompt`` so that it works even if is not ``"/"``
|
||||
- ``adb_connect``: do not assume port 5555 anymore.
|
||||
- Now always deploys busybox on rooted devices.
|
||||
- Added ``swipe_to_unlock`` method.
|
||||
- Fixed initialization of ``~/.workload_automation.``.
|
||||
- Fixed replaying events using revent on 64 bit platforms.
|
||||
- Improved error reporting when loading extensions.
|
||||
- ``result`` objects now track their output directories.
|
||||
- ``context.result`` will not result in ``context.run_result`` when not
|
||||
executing a job.
|
||||
- ``wlauto.utils.ssh``:
|
||||
- Fixed key-based authentication.
|
||||
- Fixed carriage return stripping in ssh.
|
||||
- Now takes ``password_prompt`` as a parameter for non standard ``sudo``
|
||||
password prompts.
|
||||
- Now with 100% more thread safety!
|
||||
- If a timeout condition is hit, ^C is now sent to kill the current
|
||||
foreground process and make the shell available for subsequent commands.
|
||||
- More robust ``exit_code`` handling for ssh interface
|
||||
- Now attempts to deal with dropped connections
|
||||
- Fixed error reporting on failed exit code extraction.
|
||||
- Now handles backspaces in serial output
|
||||
- Added ``port`` argument for telnet connections.
|
||||
- Now allows telnet connections without a password.
|
||||
- Fixed config processing for extensions with non-identifier names.
|
||||
- Fixed ``get_meansd`` for numbers < 1
|
||||
- ``wlauto.utils.ipython``:
|
||||
- Now supports old versions of IPython
|
||||
- Updated version check to only initialize ipython utils if version is
|
||||
< 4.0.0. Version 4.0.0 changes API and breaks WA's usage of it.
|
||||
- Added ``ignore`` parameter to ``check_output``
|
||||
- Agendas:
|
||||
- Now raise an error if an agenda contains duplicate keys
|
||||
- Now raise an error if config section in an agenda is not dict-like
|
||||
- Now properly handles ``core_names`` and ``core_clusters``
|
||||
- When merging list parameters from different sources, duplicates are no
|
||||
longer removed.
|
||||
- The ``INITIAL_BOOT`` signal is now sent when performing a hard reset during
|
||||
initial boot
|
||||
- updated ``ExecutionContext`` to keep a reference to the ``runner``. This
|
||||
will enable Extensions to do things like modify the job queue.
|
||||
- Parameters now automatically convert int and bool kinds to integer and
|
||||
boolean respectively, this behavior can be suppressed by specifying
|
||||
``convert_types``=``False`` when defining the parameter.
|
||||
- Fixed resource resolution when dependency location does not exist.
|
||||
- All device ``push`` and ``pull`` commands now raise ``DeviceError`` if they
|
||||
didn't succeed.
|
||||
- Fixed showing Parameter default of ``False`` for boolean values.
|
||||
- Updated csv result processor with the option to use classifiers to
|
||||
add columns to ``results.csv``.
|
||||
- ``wlauto.utils.formatter``: Fix terminal size discovery.
|
||||
- The extension loader will now follow symlinks.
|
||||
- Added arm64-v8a to ABI map
|
||||
- WA now reports syntax errors in a more informative way.
|
||||
- Resource resolver: now prints the path of the found resource to the log.
|
||||
- Resource getter: look for executable in the bin/ directory under resource
|
||||
owner's dependencies directory as well as general dependencies bin.
|
||||
- ``GamingWorkload``:
|
||||
- Added an option to prevent clearing of package data before execution.
|
||||
- Added the ability to override the timeout of deploying the assets
|
||||
tarball.
|
||||
- ``ApkWorkload``: Added an option to skip host-side APK check entirely.
|
||||
- ``utils.misc.normalize``: only normalize string keys.
|
||||
- Better error reporting for subprocess.CalledProcessError
|
||||
- ``boolean`` now interprets ``'off'`` as ``False``
|
||||
- ``wlauto.utils.uefi``: Added support for debug builds.
|
||||
- ``wlauto.utils.serial_port``: Now supports fdpexpect versions > 4.0.0
|
||||
- Semantics for ``initialize``/``finalize`` for *all* Extensions are changed
|
||||
so that now they will always run at most once per run. They will not be
|
||||
executed twice even if invoked via instances of different subclasses (if
|
||||
those subclasses defined their own versions, then their versions will be
|
||||
invoked once each, but the base version will only get invoked once).
|
||||
- Pulling entries from procfs does not work on some platforms. WA now tries
|
||||
to cat the contents of a property_file and write it to a output file on the
|
||||
host.
|
||||
|
||||
Documentation
|
||||
~~~~~~~~~~~~~
|
||||
- ``installation``:
|
||||
- Added ``post install`` section which lists workloads that require
|
||||
additional external dependencies.
|
||||
- Added the ``uninstall`` and ``upgrade`` commands for users to remove or
|
||||
upgrade Workload Automation.
|
||||
- Added documentation explaining how to use ``remote_assets_path``
|
||||
setting.
|
||||
- Added warning about potential permission issues with pip.
|
||||
- ``quickstart``: Added steps for setting up WA to run on Linux devices.
|
||||
- ``device_setup``: fixed ``generic_linux`` ``device_config`` example.
|
||||
- ``contributing``: Clarified style guidelines
|
||||
- ``daq_device_setup``: Added an illustration for DAQ wiring.
|
||||
- ``writing_extensions``: Documented the Workload initialize and finalize
|
||||
methods.
|
||||
- Added descriptions to extension that didn't have one.
|
||||
|
||||
Other
|
||||
~~~~~
|
||||
- ``daq_server``:
|
||||
- Fixed showing available devices.
|
||||
- Now works with earlier versions of the DAQmx driver, thus you can now run
|
||||
the server on Linux systems.
|
||||
- DAQ error messages are now properly propagated to the client.
|
||||
- Server will now periodically clean up uncollected files.
|
||||
- fixed not being able to resolve IP address for hostname
|
||||
(report "localhost" in that case).
|
||||
- Works with latest version of twisted.
|
||||
- ``setup.py``: Fixed paths to work with Mac OS X.
|
||||
- ``summary_csv`` is no longer enabled by default.
|
||||
- ``status`` result processor is now enabled by default.
|
||||
- Commands:
|
||||
- ``show``:
|
||||
- Now shows what platform extensions support.
|
||||
- Will no longer try to use a pager if ``PAGER=''`` in the environment.
|
||||
- ``list``:
|
||||
- Added ``"-p"`` option to filter results by supported platforms.
|
||||
- Added ``"--packaged-only"`` option to only list extensions packaged
|
||||
with WA.
|
||||
- ``run``: Added ``"--disable"`` option to disable instruments.
|
||||
- ``create``:
|
||||
- Added ``agenda`` sub-command to generate agendas for a set of
|
||||
extensions.
|
||||
- ``create workload`` now gives more informative errors if Android SDK
|
||||
installed but no platform has been downloaded.
|
||||
|
||||
Incompatible changes
|
||||
####################
|
||||
|
||||
Framework
|
||||
~~~~~~~~~
|
||||
- ``BaseLinuxDevice``:
|
||||
- Renamed ``active_cpus`` to ``online_cpus``
|
||||
- Renamed ``get_cluster_cpu`` to ``get_cluster_active_cpu``
|
||||
- Renamed ``get_core_cpu`` to ``get_core_online_cpu``
|
||||
- All extension's ``initialize`` function now takes one (and only one)
|
||||
parameter, ``context``.
|
||||
- ``wlauto.core.device``: Removed ``init`` function. Replaced with
|
||||
``initialize``
|
||||
|
||||
-------------
|
||||
Version 2.3.0
|
||||
-------------
|
||||
|
||||
|
@@ -113,7 +113,7 @@ pygments_style = 'sphinx'
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'default'
|
||||
html_theme = 'classic'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
|
@@ -97,6 +97,32 @@ Available Settings
|
||||
|
||||
Added in version 2.1.5.
|
||||
|
||||
|
||||
.. confval:: retry_on_status
|
||||
|
||||
This is a list of statuses on which a job will be considered to have failed and
|
||||
will be automatically retried up to ``max_retries`` times. This defaults to
|
||||
``["FAILED", "PARTIAL"]`` if not set. Possible values are:
|
||||
|
||||
``"OK"``
|
||||
This iteration has completed and no errors have been detected
|
||||
|
||||
``"PARTIAL"``
|
||||
One or more instruments have failed (the iteration may still be running).
|
||||
|
||||
``"FAILED"``
|
||||
The workload itself has failed.
|
||||
|
||||
``"ABORTED"``
|
||||
The user interrupted the workload
|
||||
|
||||
.. confval:: max_retries
|
||||
|
||||
The maximum number of times failed jobs will be retried before giving up. If
|
||||
not set, this will default to ``3``.
|
||||
|
||||
.. note:: this number does not include the original attempt
|
||||
|
||||
.. confval:: instrumentation
|
||||
|
||||
This should be a list of instruments to be enabled during run execution.
|
||||
@@ -136,6 +162,12 @@ Available Settings
|
||||
All three values should be Python `old-style format strings`_ specifying which
|
||||
`log record attributes`_ should be displayed.
|
||||
|
||||
.. confval:: remote_assets_path
|
||||
|
||||
Path to the local mount of a network assets repository. See
|
||||
:ref:`assets_repository`.
|
||||
|
||||
|
||||
There are also a couple of settings are used to provide additional metadata
|
||||
for a run. These may get picked up by instruments or result processors to
|
||||
attach context to results.
|
||||
|
@@ -2,12 +2,23 @@
|
||||
Contributing Code
|
||||
=================
|
||||
|
||||
We welcome code contributions via GitHub pull requests to the official WA
|
||||
repository. To help with maintainability of the code line we ask that the code
|
||||
uses a coding style consistent with the rest of WA code, which is basically
|
||||
`PEP8 <https://www.python.org/dev/peps/pep-0008/>`_ with line length and block
|
||||
comment rules relaxed (the wrapper for PEP8 checker inside ``dev_scripts`` will
|
||||
run it with appropriate configuration).
|
||||
We welcome code contributions via GitHub pull requests. To help with
|
||||
maintainability of the code line we ask that the code uses a coding style
|
||||
consistent with the rest of WA code. Briefly, it is
|
||||
|
||||
- `PEP8 <https://www.python.org/dev/peps/pep-0008/>`_ with line length and block
|
||||
comment rules relaxed (the wrapper for PEP8 checker inside ``dev_scripts``
|
||||
will run it with appropriate configuration).
|
||||
- Four-space indentation (*no tabs!*).
|
||||
- Title-case for class names, underscore-delimited lower case for functions,
|
||||
methods, and variables.
|
||||
- Use descriptive variable names. Delimit words with ``'_'`` for readability.
|
||||
Avoid shortening words, skipping vowels, etc (common abbreviations such as
|
||||
"stats" for "statistics", "config" for "configuration", etc are OK). Do
|
||||
*not* use Hungarian notation (so prefer ``birth_date`` over ``dtBirth``).
|
||||
|
||||
New extensions should also follow implementation guidelines specified in
|
||||
:ref:`writing_extensions` section of the documentation.
|
||||
|
||||
We ask that the following checks are performed on the modified code prior to
|
||||
submitting a pull request:
|
||||
|
BIN
doc/source/daq-wiring.png
Normal file
BIN
doc/source/daq-wiring.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 151 KiB |
@@ -68,6 +68,12 @@ varies between models.
|
||||
possible to use any other configuration (e.g. ports 1, 2 and 5).
|
||||
|
||||
|
||||
As an example, the following illustration shows the wiring of PORT0 (using AI/0
|
||||
and AI/1 channels) on a DAQ USB-6210
|
||||
|
||||
.. image:: daq-wiring.png
|
||||
:scale: 70 %
|
||||
|
||||
Setting up NI-DAQmx driver on a Windows Machine
|
||||
-----------------------------------------------
|
||||
|
||||
|
@@ -64,7 +64,7 @@ you might want to change are outlined below.
|
||||
advanced WA functionality (like setting of core-related runtime parameters
|
||||
such as governors, frequencies, etc). ``core_names`` should be a list of
|
||||
core names matching the order in which they are exposed in sysfs. For
|
||||
example, ARM TC2 SoC is a 2x3 big.LITTLE system; it's core_names would be
|
||||
example, ARM TC2 SoC is a 2x3 big.LITTLE system; its core_names would be
|
||||
``['a7', 'a7', 'a7', 'a15', 'a15']``, indicating that cpu0-cpu2 in cpufreq
|
||||
sysfs structure are A7's and cpu3 and cpu4 are A15's.
|
||||
|
||||
@@ -363,11 +363,11 @@ A typical ``device_config`` inside ``config.py`` may look something like
|
||||
.. code-block:: python
|
||||
|
||||
device_config = dict(
|
||||
'host'='192.168.0.7',
|
||||
'username'='guest',
|
||||
'password'='guest',
|
||||
'core_names'=['a7', 'a7', 'a7', 'a15', 'a15'],
|
||||
'core_clusters'=[0, 0, 0, 1, 1],
|
||||
host='192.168.0.7',
|
||||
username='guest',
|
||||
password='guest',
|
||||
core_names=['a7', 'a7', 'a7', 'a15', 'a15'],
|
||||
core_clusters=[0, 0, 0, 1, 1],
|
||||
# ...
|
||||
)
|
||||
|
||||
|
@@ -118,6 +118,7 @@ and detailed descriptions of how WA functions under the hood.
|
||||
additional_topics
|
||||
daq_device_setup
|
||||
revent
|
||||
apk_workloads
|
||||
contributing
|
||||
|
||||
API Reference
|
||||
|
@@ -15,16 +15,23 @@ Operating System
|
||||
|
||||
WA runs on a native Linux install. It was tested with Ubuntu 12.04,
|
||||
but any recent Linux distribution should work. It should run on either
|
||||
32bit or 64bit OS, provided the correct version of Android (see below)
|
||||
32-bit or 64-bit OS, provided the correct version of Android (see below)
|
||||
was installed. Officially, **other environments are not supported**. WA
|
||||
has been known to run on Linux Virtual machines and in Cygwin environments,
|
||||
though additional configuration maybe required in both cases (known issues
|
||||
though additional configuration may be required in both cases (known issues
|
||||
include making sure USB/serial connections are passed to the VM, and wrong
|
||||
python/pip binaries being picked up in Cygwin). WA *should* work on other
|
||||
Unix-based systems such as BSD or Mac OS X, but it has not been tested
|
||||
in those environments. WA *does not* run on Windows (though it should be
|
||||
possible to get limited functionality with minimal porting effort).
|
||||
|
||||
.. Note:: If you plan to run Workload Automation on Linux devices only,
|
||||
SSH is required, and Android SDK is optional if you wish
|
||||
to run WA on Android devices at a later time. Then follow the
|
||||
steps to install the necessary python packages to set up WA.
|
||||
|
||||
However, you would be starting off with a limited number of
|
||||
workloads that will run on Linux devices.
|
||||
|
||||
Android SDK
|
||||
-----------
|
||||
@@ -32,12 +39,11 @@ Android SDK
|
||||
You need to have the Android SDK with at least one platform installed.
|
||||
To install it, download the ADT Bundle from here_. Extract it
|
||||
and add ``<path_to_android_sdk>/sdk/platform-tools`` and ``<path_to_android_sdk>/sdk/tools``
|
||||
to your ``PATH``. To test that you've installed it properly run ``adb
|
||||
version``, the output should be similar to this::
|
||||
to your ``PATH``. To test that you've installed it properly, run ``adb
|
||||
version``. The output should be similar to this::
|
||||
|
||||
$$ adb version
|
||||
adb version
|
||||
Android Debug Bridge version 1.0.31
|
||||
$$
|
||||
|
||||
.. _here: https://developer.android.com/sdk/index.html
|
||||
|
||||
@@ -53,11 +59,16 @@ usually the best bet.
|
||||
Optionally (but recommended), you should also set ``ANDROID_HOME`` to point to
|
||||
the install location of the SDK (i.e. ``<path_to_android_sdk>/sdk``).
|
||||
|
||||
.. note:: You may need to install 32-bit compatibility libraries for the SDK
|
||||
to work properly. On Ubuntu you need to run::
|
||||
|
||||
sudo apt-get install lib32stdc++6 lib32z1
|
||||
|
||||
|
||||
Python
|
||||
------
|
||||
|
||||
Workload Automation 2 requires Python 2.7 (Python 3 is not supported, at the moment).
|
||||
Workload Automation 2 requires Python 2.7 (Python 3 is not supported at the moment).
|
||||
|
||||
|
||||
pip
|
||||
@@ -69,6 +80,23 @@ similar distributions, this may be done with APT::
|
||||
|
||||
sudo apt-get install python-pip
|
||||
|
||||
.. note:: Some versions of pip (in particular v1.5.4 which comes with Ubuntu
|
||||
14.04) are known to set the wrong permissions when installing
|
||||
packages, resulting in WA failing to import them. To avoid this it
|
||||
is recommended that you update pip and setuptools before proceeding
|
||||
with installation::
|
||||
|
||||
sudo -H pip install --upgrade pip
|
||||
sudo -H pip install --upgrade setuptools
|
||||
|
||||
If you do run into this issue after already installing some packages,
|
||||
you can resolve it by running ::
|
||||
|
||||
sudo chmod -R a+r /usr/local/lib/python2.7/dist-packages
|
||||
sudo find /usr/local/lib/python2.7/dist-packages -type d -exec chmod a+x {} \;
|
||||
|
||||
(The paths above will work for Ubuntu; they may need to be adjusted
|
||||
for other distros).
|
||||
|
||||
Python Packages
|
||||
---------------
|
||||
@@ -86,11 +114,11 @@ Workload Automation 2 depends on the following additional libraries:
|
||||
|
||||
You can install these with pip::
|
||||
|
||||
sudo pip install pexpect
|
||||
sudo pip install pyserial
|
||||
sudo pip install pyyaml
|
||||
sudo pip install docutils
|
||||
sudo pip install python-dateutil
|
||||
sudo -H pip install pexpect
|
||||
sudo -H pip install pyserial
|
||||
sudo -H pip install pyyaml
|
||||
sudo -H pip install docutils
|
||||
sudo -H pip install python-dateutil
|
||||
|
||||
Some of these may also be available in your distro's repositories, e.g. ::
|
||||
|
||||
@@ -129,12 +157,26 @@ may not always have Internet access).
|
||||
headers to install. You can get those by installing ``python-dev``
|
||||
package in apt on Ubuntu (or the equivalent for your distribution).
|
||||
|
||||
|
||||
Installing
|
||||
==========
|
||||
|
||||
Download the tarball and run pip::
|
||||
Installing the latest released version from PyPI (Python Package Index)::
|
||||
|
||||
sudo -H pip install wlauto
|
||||
|
||||
This will install WA along with its mandatory dependencies. If you would like to
|
||||
install all optional dependencies at the same time, do the following instead::
|
||||
|
||||
sudo -H pip install wlauto[all]
|
||||
|
||||
Alternatively, you can also install the latest development version from GitHub
|
||||
(you will need git installed for this to work)::
|
||||
|
||||
git clone git@github.com:ARM-software/workload-automation.git workload-automation
|
||||
sudo -H pip install ./workload-automation
|
||||
|
||||
|
||||
sudo pip install wlauto-$version.tar.gz
|
||||
|
||||
If the above succeeds, try ::
|
||||
|
||||
@@ -142,3 +184,161 @@ If the above succeeds, try ::
|
||||
|
||||
Hopefully, this should output something along the lines of "Workload Automation
|
||||
version $version".
|
||||
|
||||
|
||||
(Optional) Post Installation
|
||||
============================
|
||||
|
||||
Some WA extensions have additional dependencies that need to be
|
||||
satisfied before they can be used. Not all of these can be provided with WA and
|
||||
so will need to be supplied by the user. They should be placed into
|
||||
``~/.workload_automation/dependencies/<extension name>`` so that WA can find
|
||||
them (you may need to create the directory if it doesn't already exist). You
|
||||
only need to provide the dependencies for workloads you want to use.
|
||||
|
||||
Binary Files
|
||||
------------
|
||||
|
||||
Some workloads require native binaries to work. Different binaries will be required
|
||||
for different ABIs. WA may not include the required binary for a workload due to
|
||||
licensing/distribution issues, or may not have a binary compiled for your device's
|
||||
ABI. In such cases, you will have to supply the missing binaries.
|
||||
|
||||
Executable binaries for a workload should be placed inside
|
||||
``~/.workload_automation/dependencies/<extension name>/bin/<ABI>`` directory.
|
||||
This directory may not already exist, in which case you would have to create it.
|
||||
|
||||
Binaries placed in that location will take precedence over any already included with
|
||||
WA. For example, if you have your own ``dhrystone`` binary compiled for ``arm64``,
|
||||
and you want WA to pick it up, you can do the following on WA host machine ::
|
||||
|
||||
mkdir -p ~/.workload_automation/dependencies/dhrystone/bin/arm64/
|
||||
cp /path/to/your/dhrystone ~/.workload_automation/dependencies/dhrystone/bin/arm64/
|
||||
|
||||
APK Files
|
||||
---------
|
||||
|
||||
APKs are application packages used by Android. These are necessary to install an
|
||||
application onto devices that do not have Google Play (e.g. devboards running
|
||||
AOSP). The following is a list of workloads that will need one, including the
|
||||
version(s) for which UI automation has been tested. Automation may also work
|
||||
with other versions (especially if it's only a minor or revision difference --
|
||||
major version differences are more likely to contain incompatible UI changes) but
|
||||
this has not been tested.
|
||||
|
||||
================ ============================================ ========================= ============ ============
|
||||
workload package name version code version name
|
||||
================ ============================================ ========================= ============ ============
|
||||
andebench com.eembc.coremark AndEBench v1383a 1383
|
||||
angrybirds com.rovio.angrybirds Angry Birds 2.1.1 2110
|
||||
angrybirds_rio com.rovio.angrybirdsrio Angry Birds 1.3.2 1320
|
||||
anomaly2 com.elevenbitstudios.anomaly2Benchmark A2 Benchmark 1.1 50
|
||||
antutu com.antutu.ABenchMark AnTuTu Benchmark 5.3 5030000
|
||||
antutu com.antutu.ABenchMark AnTuTu Benchmark 3.3.2 3322
|
||||
antutu com.antutu.ABenchMark AnTuTu Benchmark 4.0.3 4000300
|
||||
benchmarkpi gr.androiddev.BenchmarkPi BenchmarkPi 1.11 5
|
||||
caffeinemark com.flexycore.caffeinemark CaffeineMark 1.2.4 9
|
||||
castlebuilder com.ettinentertainment.castlebuilder Castle Builder 1.0 1
|
||||
castlemaster com.alphacloud.castlemaster Castle Master 1.09 109
|
||||
cfbench eu.chainfire.cfbench CF-Bench 1.2 7
|
||||
citadel com.epicgames.EpicCitadel Epic Citadel 1.07 901107
|
||||
dungeondefenders com.trendy.ddapp Dungeon Defenders 5.34 34
|
||||
facebook com.facebook.katana Facebook 3.4 258880
|
||||
geekbench ca.primatelabs.geekbench2 Geekbench 2 2.2.7 202007
|
||||
geekbench com.primatelabs.geekbench3 Geekbench 3 3.0.0 135
|
||||
glb_corporate net.kishonti.gfxbench GFXBench 3.0.0 1
|
||||
glbenchmark com.glbenchmark.glbenchmark25 GLBenchmark 2.5 2.5 4
|
||||
glbenchmark com.glbenchmark.glbenchmark27 GLBenchmark 2.7 2.7 1
|
||||
gunbros2 com.glu.gunbros2 GunBros2 1.2.2 122
|
||||
ironman com.gameloft.android.ANMP.GloftIMHM Iron Man 3 1.3.1 1310
|
||||
krazykart com.polarbit.sg2.krazyracers Krazy Kart Racing 1.2.7 127
|
||||
linpack com.greenecomputing.linpackpro Linpack Pro for Android 1.2.9 31
|
||||
nenamark se.nena.nenamark2 NenaMark2 2.4 5
|
||||
peacekeeper com.android.chrome Chrome 18.0.1025469 1025469
|
||||
peacekeeper org.mozilla.firefox Firefox 23.0 2013073011
|
||||
quadrant com.aurorasoftworks.quadrant.ui.professional Quadrant Professional 2.0 2000000
|
||||
realracing3 com.ea.games.r3_row Real Racing 3 1.3.5 1305
|
||||
smartbench com.smartbench.twelve Smartbench 2012 1.0.0 5
|
||||
sqlite com.redlicense.benchmark.sqlite RL Benchmark 1.3 5
|
||||
templerun com.imangi.templerun Temple Run 1.0.8 11
|
||||
thechase com.unity3d.TheChase The Chase 1.0 1
|
||||
truckerparking3d com.tapinator.truck.parking.bus3d Truck Parking 3D 2.5 7
|
||||
vellamo com.quicinc.vellamo Vellamo 3.0 3001
|
||||
vellamo com.quicinc.vellamo Vellamo 2.0.3 2003
|
||||
videostreaming tw.com.freedi.youtube.player FREEdi YT Player 2.1.13 79
|
||||
================ ============================================ ========================= ============ ============
|
||||
|
||||
Gaming Workloads
|
||||
----------------
|
||||
|
||||
Some workloads (games, demos, etc) cannot be automated using Android's
|
||||
UIAutomator framework because they render the entire UI inside a single OpenGL
|
||||
surface. For these, an interaction session needs to be recorded so that it can
|
||||
be played back by WA. These recordings are device-specific, so they would need
|
||||
to be done for each device you're planning to use. The tool for doing this is
|
||||
``revent`` and it is packaged with WA. You can find instructions on how to use
|
||||
it :ref:`here <revent_files_creation>`.
|
||||
|
||||
This is the list of workloads that rely on such recordings:
|
||||
|
||||
+------------------+
|
||||
| angrybirds |
|
||||
+------------------+
|
||||
| angrybirds_rio |
|
||||
+------------------+
|
||||
| anomaly2 |
|
||||
+------------------+
|
||||
| castlebuilder |
|
||||
+------------------+
|
||||
| castlemastera |
|
||||
+------------------+
|
||||
| citadel |
|
||||
+------------------+
|
||||
| dungeondefenders |
|
||||
+------------------+
|
||||
| gunbros2 |
|
||||
+------------------+
|
||||
| ironman |
|
||||
+------------------+
|
||||
| krazykart |
|
||||
+------------------+
|
||||
| realracing3 |
|
||||
+------------------+
|
||||
| templerun |
|
||||
+------------------+
|
||||
| truckerparking3d |
|
||||
+------------------+
|
||||
|
||||
.. _assets_repository:
|
||||
|
||||
Maintaining Centralized Assets Repository
|
||||
-----------------------------------------
|
||||
|
||||
If there are multiple users within an organization that may need to deploy
|
||||
assets for WA extensions, that organization may wish to maintain a centralized
|
||||
repository of assets that individual WA installs will be able to automatically
|
||||
retrieve asset files from as they are needed. This repository can be any
|
||||
directory on a network filer that mirrors the structure of
|
||||
``~/.workload_automation/dependencies``, i.e. has subdirectories named after
|
||||
the extensions which assets they contain. Individual WA installs can then set
|
||||
``remote_assets_path`` setting in their config to point to the local mount of
|
||||
that location.
|
||||
|
||||
|
||||
(Optional) Uninstalling
|
||||
=======================
|
||||
|
||||
If you have installed Workload Automation via ``pip`` and wish to remove it, run this command to
|
||||
uninstall it::
|
||||
|
||||
sudo -H pip uninstall wlauto
|
||||
|
||||
.. Note:: This will *not* remove any user configuration (e.g. the ~/.workload_automation directory)
|
||||
|
||||
|
||||
(Optional) Upgrading
|
||||
====================
|
||||
|
||||
To upgrade Workload Automation to the latest version via ``pip``, run::
|
||||
|
||||
sudo -H pip install --upgrade --no-deps wlauto
|
||||
|
@@ -61,13 +61,13 @@ Instrument method realive to other callbacks registered for the signal (within t
|
||||
level, callbacks are invoked in the order they were registered). The table below shows the mapping
|
||||
of the prefix to the corresponding priority:
|
||||
|
||||
=========== ===
|
||||
=========== ========
|
||||
prefix priority
|
||||
=========== ===
|
||||
very_fast\_ 20
|
||||
fast\_ 10
|
||||
normal\_ 0
|
||||
slow\_ -10
|
||||
very_slow\_ -20
|
||||
=========== ===
|
||||
=========== ========
|
||||
very_fast\_ 20
|
||||
fast\_ 10
|
||||
normal\_ 0
|
||||
slow\_ -10
|
||||
very_slow\_ -20
|
||||
=========== ========
|
||||
|
||||
|
@@ -1,11 +1,12 @@
|
||||
.. _invocation:
|
||||
.. highlight:: none
|
||||
|
||||
========
|
||||
Commands
|
||||
========
|
||||
|
||||
Installing the wlauto package will add ``wa`` command to your system,
|
||||
which you can run from anywhere. This has a number of sub-commands, which can
|
||||
which you can run from anywhere. This has a number of sub-commands, which can
|
||||
be viewed by executing ::
|
||||
|
||||
wa -h
|
||||
@@ -15,7 +16,7 @@ Individual sub-commands are discussed in detail below.
|
||||
run
|
||||
---
|
||||
|
||||
The most common sub-command you will use is ``run``. This will run specfied
|
||||
The most common sub-command you will use is ``run``. This will run specified
|
||||
workload(s) and process resulting output. This takes a single mandatory
|
||||
argument that specifies what you want WA to run. This could be either a
|
||||
workload name, or a path to an "agenda" file that allows to specify multiple
|
||||
@@ -24,7 +25,7 @@ section for details). Executing ::
|
||||
|
||||
wa run -h
|
||||
|
||||
Will display help for this subcommand that will look somehtign like this::
|
||||
Will display help for this subcommand that will look something like this::
|
||||
|
||||
usage: run [-d DIR] [-f] AGENDA
|
||||
|
||||
@@ -47,13 +48,13 @@ Will display help for this subcommand that will look somehtign like this::
|
||||
--debug Enable debug mode. Note: this implies --verbose.
|
||||
-d DIR, --output-directory DIR
|
||||
Specify a directory where the output will be
|
||||
generated. If the directoryalready exists, the script
|
||||
generated. If the directory already exists, the script
|
||||
will abort unless -f option (see below) is used,in
|
||||
which case the contents of the directory will be
|
||||
overwritten. If this optionis not specified, then
|
||||
overwritten. If this option is not specified, then
|
||||
wa_output will be used instead.
|
||||
-f, --force Overwrite output directory if it exists. By default,
|
||||
the script will abort in thissituation to prevent
|
||||
the script will abort in this situation to prevent
|
||||
accidental data loss.
|
||||
-i ID, --id ID Specify a workload spec ID from an agenda to run. If
|
||||
this is specified, only that particular spec will be
|
||||
@@ -81,10 +82,74 @@ agenda file used to run the workloads along with any other device-specific
|
||||
configuration files used during execution.
|
||||
|
||||
|
||||
create
|
||||
------
|
||||
|
||||
This can be used to create various WA-related objects, currently workloads, packages and agendas.
|
||||
The full set of options for this command are::
|
||||
|
||||
usage: wa create [-h] [-c CONFIG] [-v] [--debug] [--version]
|
||||
{workload,package,agenda} ...
|
||||
|
||||
positional arguments:
|
||||
{workload,package,agenda}
|
||||
workload Create a new workload. By default, a basic workload
|
||||
template will be used but you can use options to
|
||||
specify a different template.
|
||||
package Create a new empty Python package for WA extensions.
|
||||
On installation, this package will "advertise" itself
|
||||
to WA so that Extensions within it will be loaded by
|
||||
WA when it runs.
|
||||
agenda Create an agenda with the specified extensions
|
||||
enabled, and parameters set to their default values.
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-c CONFIG, --config CONFIG
|
||||
specify an additional config.py
|
||||
-v, --verbose The scripts will produce verbose output.
|
||||
--debug Enable debug mode. Note: this implies --verbose.
|
||||
--version show program's version number and exit
|
||||
|
||||
Use "wa create <object> -h" to see all the object-specific arguments. For example::
|
||||
|
||||
wa create agenda -h
|
||||
|
||||
will display the relevant options that can be used to create an agenda.
|
||||
|
||||
get-assets
|
||||
----------
|
||||
|
||||
This command can download external extension dependencies used by Workload Automation.
|
||||
It can be used to download assets for all available extensions or those specifically listed.
|
||||
The full set of options for this command are::
|
||||
|
||||
usage: wa get-assets [-h] [-c CONFIG] [-v] [--debug] [--version] [-f]
|
||||
[--url URL] (-a | -e EXT [EXT ...])
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-c CONFIG, --config CONFIG
|
||||
specify an additional config.py
|
||||
-v, --verbose The scripts will produce verbose output.
|
||||
--debug Enable debug mode. Note: this implies --verbose.
|
||||
--version show program's version number and exit
|
||||
-f, --force Always fetch the assets, even if matching versions
|
||||
exist in local cache.
|
||||
--url URL The location from which to download the files. If not
|
||||
provided, config setting ``remote_assets_url`` will be
|
||||
used if available, else uses the default
|
||||
REMOTE_ASSETS_URL parameter in the script.
|
||||
-a, --all Download assets for all extensions found in the index.
|
||||
Cannot be used with -e.
|
||||
-e EXT [EXT ...] One or more extensions whose assets to download.
|
||||
Cannot be used with --all.
|
||||
|
||||
|
||||
list
|
||||
----
|
||||
|
||||
This lists all extensions of a particular type. For example ::
|
||||
This lists all extensions of a particular type. For example::
|
||||
|
||||
wa list workloads
|
||||
|
||||
@@ -97,11 +162,11 @@ show
|
||||
|
||||
This will show detailed information about an extension, including more in-depth
|
||||
description and any parameters/configuration that are available. For example
|
||||
executing ::
|
||||
executing::
|
||||
|
||||
wa show andebench
|
||||
|
||||
will produce something like ::
|
||||
will produce something like::
|
||||
|
||||
|
||||
andebench
|
||||
@@ -131,5 +196,64 @@ will produce something like ::
|
||||
- Results displayed in Iterations per second
|
||||
- Detailed log file for comprehensive engineering analysis
|
||||
|
||||
.. _record-command:
|
||||
|
||||
record
|
||||
------
|
||||
|
||||
This command simplifies the process of recording a revent file. It
|
||||
will automatically deploy revent and even has the option of automatically
|
||||
opening apps. WA uses two parts to the names of revent recordings in the
|
||||
format, {device_name}.{suffix}.revent. - device_name can either be specified
|
||||
manually with the ``-d`` argument or it can be automatically determined. On
|
||||
Android device it will be obtained from ``build.prop``, on Linux devices it is
|
||||
obtained from ``/proc/device-tree/model``. - suffix is used by WA to determine
|
||||
which part of the app execution the recording is for, currently these are
|
||||
either ``setup`` or ``run``. This should be specified with the ``-s``
|
||||
argument. The full set of options for this command are::
|
||||
|
||||
usage: wa record [-h] [-c CONFIG] [-v] [--debug] [--version] [-d DEVICE]
|
||||
[-s SUFFIX] [-o OUTPUT] [-p PACKAGE] [-g] [-C]
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-c CONFIG, --config CONFIG
|
||||
specify an additional config.py
|
||||
-v, --verbose The scripts will produce verbose output.
|
||||
--debug Enable debug mode. Note: this implies --verbose.
|
||||
--version show program's version number and exit
|
||||
-d DEVICE, --device DEVICE
|
||||
The name of the device
|
||||
-s SUFFIX, --suffix SUFFIX
|
||||
The suffix of the revent file, e.g. ``setup``
|
||||
-o OUTPUT, --output OUTPUT
|
||||
Directory to save the recording in
|
||||
-p PACKAGE, --package PACKAGE
|
||||
Package to launch before recording
|
||||
-g, --gamepad Record from a gamepad rather than all devices.
|
||||
-C, --clear Clear app cache before launching it
|
||||
|
||||
.. _replay-command:
|
||||
|
||||
replay
|
||||
------
|
||||
|
||||
Alongside ``record``, WA also has a command to play back recorded revent files.
|
||||
It behaves very similarly to the ``record`` command, taking many of the same options::
|
||||
|
||||
usage: wa replay [-h] [-c CONFIG] [-v] [--debug] [--version] [-p PACKAGE] [-C]
|
||||
revent
|
||||
|
||||
positional arguments:
|
||||
revent The name of the file to replay
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
-c CONFIG, --config CONFIG
|
||||
specify an additional config.py
|
||||
-v, --verbose The scripts will produce verbose output.
|
||||
--debug Enable debug mode. Note: this implies --verbose.
|
||||
--version show program's version number and exit
|
||||
-p PACKAGE, --package PACKAGE
|
||||
Package to launch before recording
|
||||
-C, --clear Clear app cache before launching it
|
||||
|
@@ -2,7 +2,7 @@
|
||||
Quickstart
|
||||
==========
|
||||
|
||||
This sections will show you how to quickly start running workloads using
|
||||
This guide will show you how to quickly start running workloads using
|
||||
Workload Automation 2.
|
||||
|
||||
|
||||
@@ -13,22 +13,26 @@ Install
|
||||
the :doc:`installation` section.
|
||||
|
||||
Make sure you have Python 2.7 and a recent Android SDK with API level 18 or above
|
||||
installed on your system. For the SDK, make sure that either ``ANDROID_HOME``
|
||||
environment variable is set, or that ``adb`` is in your ``PATH``.
|
||||
installed on your system. A complete install of the Android SDK is required, as
|
||||
WA uses a number of its utilities, not just adb. For the SDK, make sure that either
|
||||
``ANDROID_HOME`` environment variable is set, or that ``adb`` is in your ``PATH``.
|
||||
|
||||
.. note:: A complete install of the Android SDK is required, as WA uses a
|
||||
number of its utilities, not just adb.
|
||||
.. Note:: If you plan to run Workload Automation on Linux devices only, SSH is required,
|
||||
and Android SDK is optional if you wish to run WA on Android devices at a
|
||||
later time.
|
||||
|
||||
However, you would be starting off with a limited number of workloads that
|
||||
will run on Linux devices.
|
||||
|
||||
In addition to the base Python 2.7 install, you will also need to have ``pip``
|
||||
(Python's package manager) installed as well. This is usually a separate package.
|
||||
|
||||
Once you have the pre-requisites and a tarball with the workload automation package,
|
||||
you can install it with pip::
|
||||
Once you have those, you can install WA with::
|
||||
|
||||
sudo pip install wlauto-2.2.0dev.tar.gz
|
||||
sudo -H pip install wlauto
|
||||
|
||||
This will install Workload Automation on your system, along with the Python
|
||||
packages it depends on.
|
||||
This will install Workload Automation on your system, along with its mandatory
|
||||
dependencies.
|
||||
|
||||
(Optional) Verify installation
|
||||
-------------------------------
|
||||
@@ -52,15 +56,23 @@ For more details, please see the :doc:`installation` section.
|
||||
Configure Your Device
|
||||
=====================
|
||||
|
||||
Out of the box, WA is configured to work with a generic Android device through
|
||||
``adb``. If you only have one device listed when you execute ``adb devices``,
|
||||
and your device has a standard Android configuration, then no extra configuration
|
||||
is required (if your device is connected via network, you will have to manually execute
|
||||
``adb connect <device ip>`` so that it appears in the device listing).
|
||||
Locate the device configuration file, config.py, under the
|
||||
~/.workload_automation directory. Then adjust the device
|
||||
configuration settings accordingly to the device you are using.
|
||||
|
||||
If you have multiple devices connected, you will need to tell WA which one you
|
||||
want it to use. You can do that by setting ``adb_name`` in device configuration inside
|
||||
``~/.workload_automation/config.py``\ , e.g.
|
||||
Android
|
||||
-------
|
||||
|
||||
By default, the device is set to 'generic_android'. WA is configured to work
|
||||
with a generic Android device through ``adb``. If you only have one device listed
|
||||
when you execute ``adb devices``, and your device has a standard Android
|
||||
configuration, then no extra configuration is required.
|
||||
|
||||
However, if your device is connected via network, you will have to manually execute
|
||||
``adb connect <device ip>`` so that it appears in the device listing.
|
||||
|
||||
If you have multiple devices connected, you will need to tell WA which one you
|
||||
want it to use. You can do that by setting ``adb_name`` in device_config section.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
@@ -73,10 +85,73 @@ want it to use. You can do that by setting ``adb_name`` in device configuration
|
||||
|
||||
# ...
|
||||
|
||||
This should give you basic functionality. If your device has non-standard
|
||||
Android configuration (e.g. it's a development board) or your need some advanced
|
||||
functionality (e.g. big.LITTLE tuning parameters), additional configuration may
|
||||
be required. Please see the :doc:`device_setup` section for more details.
|
||||
Linux
|
||||
-----
|
||||
|
||||
First, set the device to 'generic_linux'
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# ...
|
||||
device = 'generic_linux'
|
||||
# ...
|
||||
|
||||
Find the device_config section and add these parameters
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# ...
|
||||
|
||||
device_config = dict(
|
||||
host = '192.168.0.100',
|
||||
username = 'root',
|
||||
password = 'password'
|
||||
# ...
|
||||
)
|
||||
|
||||
# ...
|
||||
|
||||
Parameters:
|
||||
|
||||
- Host is the IP of your target Linux device
|
||||
- Username is the user for the device
|
||||
- Password is the password for the device
|
||||
|
||||
Enabling and Disabling Instrumentation
|
||||
---------------------------------------
|
||||
|
||||
Some instrumentation tools are enabled after your initial install of WA.
|
||||
|
||||
.. note:: Some Linux devices may not be able to run certain instruments
|
||||
provided by WA (e.g. cpufreq is disabled or unsupported by the
|
||||
device).
|
||||
|
||||
As a start, keep the 'execution_time' instrument enabled while commenting out
|
||||
the rest to disable them.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# ...
|
||||
|
||||
Instrumentation = [
|
||||
# Records the time it took to run the workload
|
||||
'execution_time',
|
||||
|
||||
# Collects /proc/interrupts before and after execution and does a diff.
|
||||
# 'interrupts',
|
||||
|
||||
# Collects the contents of/sys/devices/system/cpu before and after execution and does a diff.
|
||||
# 'cpufreq',
|
||||
|
||||
# ...
|
||||
)
|
||||
|
||||
|
||||
|
||||
This should give you basic functionality. If you are working with a development
|
||||
board or you need some advanced functionality (e.g. big.LITTLE tuning parameters),
|
||||
additional configuration may be required. Please see the :doc:`device_setup`
|
||||
section for more details.
|
||||
|
||||
|
||||
Running Your First Workload
|
||||
@@ -155,8 +230,55 @@ This agenda
|
||||
the config.py.
|
||||
- Disables execution_time instrument, if it is enabled in the config.py
|
||||
|
||||
There is a lot more that could be done with an agenda. Please see :doc:`agenda`
|
||||
section for details.
|
||||
An agenda can be created in a text editor and saved as a YAML file. Please make note of
|
||||
where you have saved the agenda.
|
||||
|
||||
Please see :doc:`agenda` section for more options.
|
||||
|
||||
.. _YAML: http://en.wikipedia.org/wiki/YAML
|
||||
|
||||
Examples
|
||||
========
|
||||
|
||||
These examples show some useful options with the ``wa run`` command.
|
||||
|
||||
To run your own agenda::
|
||||
|
||||
wa run <path/to/agenda> (e.g. wa run ~/myagenda.yaml)
|
||||
|
||||
To redirect the output to a different directory other than wa_output::
|
||||
|
||||
wa run dhrystone -d my_output_directory
|
||||
|
||||
To use a different config.py file::
|
||||
|
||||
wa run -c myconfig.py dhrystone
|
||||
|
||||
To use the same output directory but override existing contents to
|
||||
store new dhrystone results::
|
||||
|
||||
wa run -f dhrystone
|
||||
|
||||
To display verbose output while running memcpy::
|
||||
|
||||
wa run --verbose memcpy
|
||||
|
||||
Uninstall
|
||||
=========
|
||||
|
||||
If you have installed Workload Automation via ``pip``, then run this command to
|
||||
uninstall it::
|
||||
|
||||
sudo pip uninstall wlauto
|
||||
|
||||
|
||||
.. Note:: It will *not* remove any user configuration (e.g. the ~/.workload_automation
|
||||
directory).
|
||||
|
||||
Upgrade
|
||||
=======
|
||||
|
||||
To upgrade Workload Automation to the latest version via ``pip``, run::
|
||||
|
||||
sudo pip install --upgrade --no-deps wlauto
|
||||
|
||||
|
@@ -1,3 +1,5 @@
|
||||
.. _resources:
|
||||
|
||||
Dynamic Resource Resolution
|
||||
===========================
|
||||
|
||||
@@ -7,10 +9,10 @@ The idea is to decouple resource identification from resource discovery.
|
||||
Workloads/instruments/devices/etc state *what* resources they need, and not
|
||||
*where* to look for them -- this instead is left to the resource resolver that
|
||||
is now part of the execution context. The actual discovery of resources is
|
||||
performed by resource getters that are registered with the resolver.
|
||||
performed by resource getters that are registered with the resolver.
|
||||
|
||||
A resource type is defined by a subclass of
|
||||
:class:`wlauto.core.resource.Resource`. An instance of this class describes a
|
||||
:class:`wlauto.core.resource.Resource`. An instance of this class describes a
|
||||
resource that is to be obtained. At minimum, a ``Resource`` instance has an
|
||||
owner (which is typically the object that is looking for the resource), but
|
||||
specific resource types may define other parameters that describe an instance of
|
||||
|
@@ -1,7 +1,10 @@
|
||||
.. _revent_files_creation:
|
||||
|
||||
revent
|
||||
======
|
||||
++++++
|
||||
|
||||
Overview and Usage
|
||||
==================
|
||||
|
||||
revent utility can be used to record and later play back a sequence of user
|
||||
input events, such as key presses and touch screen taps. This is an alternative
|
||||
@@ -20,28 +23,44 @@ to Android UI Automator for providing automation for workloads. ::
|
||||
Recording
|
||||
---------
|
||||
|
||||
To record, transfer the revent binary to the device, then invoke ``revent
|
||||
record``, giving it the time (in seconds) you want to record for, and the
|
||||
file you want to record to (WA expects these files to have .revent
|
||||
extension)::
|
||||
WA features a ``record`` command that will automatically deploy and start
|
||||
revent on the target device::
|
||||
|
||||
host$ adb push revent /data/local/revent
|
||||
host$ adb shell
|
||||
device# cd /data/local
|
||||
device# ./revent record 1000 my_recording.revent
|
||||
wa record
|
||||
INFO Connecting to device...
|
||||
INFO Press Enter when you are ready to record...
|
||||
[Pressed Enter]
|
||||
INFO Press Enter when you have finished recording...
|
||||
[Pressed Enter]
|
||||
INFO Pulling files from device
|
||||
|
||||
Once started, you will need to get the target device ready to record (e.g.
|
||||
unlock screen, navigate menus and launch an app) then press ``ENTER``.
|
||||
The recording has now started and button presses, taps, etc you perform on
|
||||
the device will go into the .revent file. To stop the recording simply press
|
||||
``ENTER`` again.
|
||||
|
||||
Once you have finished recording the revent file will be pulled from the device
|
||||
to the current directory. It will be named ``{device_model}.revent``. When
|
||||
recording revent files for a ``GameWorkload`` you can use the ``-s`` option to
|
||||
add ``run`` or ``setup`` suffixes.
|
||||
|
||||
From version 2.6 of WA onwards, a "gamepad" recording mode is also supported.
|
||||
This mode requires a gamepad to be connected to the device when recording, but
|
||||
the recordings produced in this mode should be portable across devices.
|
||||
|
||||
For more information please read :ref:`record-command`
|
||||
|
||||
The recording has now started and button presses, taps, etc you perform on the
|
||||
device will go into the .revent file. The recording will stop after the
|
||||
specified time period, and you can also stop it by hitting return in the adb
|
||||
shell.
|
||||
|
||||
Replaying
|
||||
---------
|
||||
|
||||
To replay a recorded file, run ``revent replay`` on the device, giving it the
|
||||
file you want to replay::
|
||||
To replay a recorded file, run ``wa replay``, giving it the file you want to
|
||||
replay::
|
||||
|
||||
device# ./revent replay my_recording.revent
|
||||
wa replay my_recording.revent
|
||||
|
||||
For more information please read :ref:`replay-command`
|
||||
|
||||
|
||||
Using revent With Workloads
|
||||
@@ -95,3 +114,359 @@ where as UI Automator only works for Android UI elements (such as text boxes or
|
||||
radio buttons), which makes the latter useless for things like games. Recording
|
||||
revent sequence is also faster than writing automation code (on the other hand,
|
||||
one would need to maintain a different revent log for each screen resolution).
|
||||
|
||||
|
||||
Using state detection with revent
|
||||
=================================
|
||||
|
||||
State detection can be used to verify that a workload is executing as expected.
|
||||
This utility, if enabled, and if state definitions are available for the
|
||||
particular workload, takes a screenshot after the setup and the run revent
|
||||
sequence, matches the screenshot to a state and compares with the expected
|
||||
state. A WorkloadError is raised if an unexpected state is encountered.
|
||||
|
||||
To enable state detection, make sure a valid state definition file and
|
||||
templates exist for your workload and set the check_states parameter to True.
|
||||
|
||||
State definition directory
|
||||
--------------------------
|
||||
|
||||
State and phase definitions should be placed in a directory of the following
|
||||
structure inside the dependencies directory of each workload (along with
|
||||
revent files etc):
|
||||
|
||||
::
|
||||
|
||||
dependencies/
|
||||
<workload_name>/
|
||||
state_definitions/
|
||||
definition.yaml
|
||||
templates/
|
||||
<oneTemplate>.png
|
||||
<anotherTemplate>.png
|
||||
...
|
||||
|
||||
definition.yaml file
|
||||
--------------------
|
||||
|
||||
This defines each state of the workload and lists which templates are expected
|
||||
to be found and how many are required to be detected for a conclusive match. It
|
||||
also defines the expected state in each workload phase where a state detection
|
||||
is run (currently those are setup_complete and run_complete).
|
||||
|
||||
Templates are picture elements to be matched in a screenshot. Each template
|
||||
mentioned in the definition file should be placed as a file with the same name
|
||||
and a .png extension inside the templates folder. Creating template png files
|
||||
is as simple as taking a screenshot of the workload in a given state, cropping
|
||||
out the relevant templates (eg. a button, label or other unique element that is
|
||||
present in that state) and storing them in PNG format.
|
||||
|
||||
Please see the definition file for Angry Birds below as an example to
|
||||
understand the format. Note that more than just two states (for the afterSetup
|
||||
and afterRun phase) can be defined and this helps track the cause of errors in
|
||||
case an unexpected state is encountered.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
workload_name: angrybirds
|
||||
|
||||
workload_states:
|
||||
- state_name: titleScreen
|
||||
templates:
|
||||
- play_button
|
||||
- logo
|
||||
matches: 2
|
||||
- state_name: worldSelection
|
||||
templates:
|
||||
- first_world_thumb
|
||||
- second_world_thumb
|
||||
- third_world_thumb
|
||||
- fourth_world_thumb
|
||||
matches: 3
|
||||
- state_name: level_selection
|
||||
templates:
|
||||
- locked_level
|
||||
- first_level
|
||||
matches: 2
|
||||
- state_name: gameplay
|
||||
templates:
|
||||
- pause_button
|
||||
- score_label_text
|
||||
matches: 2
|
||||
- state_name: pause_screen
|
||||
templates:
|
||||
- replay_button
|
||||
- menu_button
|
||||
- resume_button
|
||||
- help_button
|
||||
matches: 4
|
||||
- state_name: level_cleared_screen
|
||||
templates:
|
||||
- level_cleared_text
|
||||
- menu_button
|
||||
- replay_button
|
||||
- fast_forward_button
|
||||
matches: 4
|
||||
|
||||
workload_phases:
|
||||
- phase_name: setup_complete
|
||||
expected_state: gameplay
|
||||
- phase_name: run_complete
|
||||
expected_state: level_cleared_screen
|
||||
|
||||
|
||||
File format of revent recordings
|
||||
================================
|
||||
|
||||
You do not need to understand recording format in order to use revent. This
|
||||
section is intended for those looking to extend revent in some way, or to
|
||||
utilize revent recordings for other purposes.
|
||||
|
||||
Format Overview
|
||||
---------------
|
||||
|
||||
Recordings are stored in a binary format. A recording consists of three
|
||||
sections::
|
||||
|
||||
+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Header |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| Device Description |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| |
|
||||
| Event Stream |
|
||||
| |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+
|
||||
|
||||
The header contains metadata describing the recording. The device description
|
||||
contains information about input devices involved in this recording. Finally,
|
||||
the event stream contains the recorded input events.
|
||||
|
||||
All fields are either fixed size or prefixed with their length or the number of
|
||||
(fixed-sized) elements.
|
||||
|
||||
.. note:: All values below are little endian
|
||||
|
||||
|
||||
Recording Header
|
||||
----------------
|
||||
|
||||
An revent recoding header has the following structure
|
||||
|
||||
* It starts with the "magic" string ``REVENT`` to indicate that this is an
|
||||
revent recording.
|
||||
* The magic is followed by a 16 bit version number. This indicates the format
|
||||
version of the recording that follows. Current version is ``2``.
|
||||
* The next 16 bits indicate the type of the recording. This dictates the
|
||||
structure of the Device Description section. Valid values are:
|
||||
|
||||
``0``
|
||||
This is a general input event recording. The device description
|
||||
contains a list of paths from which the events where recorded.
|
||||
``1``
|
||||
This a gamepad recording. The device description contains the
|
||||
description of the gamepad used to create the recording.
|
||||
|
||||
* The header is zero-padded to 128 bits.
|
||||
|
||||
::
|
||||
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| 'R' | 'E' | 'V' | 'E' |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| 'N' | 'T' | Version |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Mode | PADDING |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| PADDING |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
|
||||
|
||||
Device Description
|
||||
------------------
|
||||
|
||||
This section describes the input devices used in the recording. Its structure is
|
||||
determined by the value of ``Mode`` field in the header.
|
||||
|
||||
general recording
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
.. note:: This is the only format supported prior to version ``2``.
|
||||
|
||||
The recording has been made from all available input devices. This section
|
||||
contains the list of ``/dev/input`` paths for the devices, prefixed with total
|
||||
number of the devices recorded.
|
||||
|
||||
::
|
||||
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Number of devices |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| Device paths +-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
|
||||
|
||||
Similarly, each device path is a length-prefixed string. Unlike C strings, the
|
||||
path is *not* NULL-terminated.
|
||||
|
||||
::
|
||||
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Length of device path |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| Device path |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
|
||||
|
||||
gamepad recording
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
The recording has been made from a specific gamepad. All events in the stream
|
||||
will be for that device only. The section describes the device properties that
|
||||
will be used to create a virtual input device using ``/dev/uinput``. Please
|
||||
see ``linux/input.h`` header in the Linux kernel source for more information
|
||||
about the fields in this section.
|
||||
|
||||
::
|
||||
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| bustype | vendor |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| product | version |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| name_length |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| name |
|
||||
| |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| ev_bits |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| |
|
||||
| key_bits (96 bytes) |
|
||||
| |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| |
|
||||
| rel_bits (96 bytes) |
|
||||
| |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| |
|
||||
| abs_bits (96 bytes) |
|
||||
| |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| num_absinfo |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
| absinfo entries |
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
| |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
|
||||
|
||||
Each ``absinfo`` entry consists of six 32 bit values. The number of entries is
|
||||
determined by the ``abs_bits`` field.
|
||||
|
||||
|
||||
::
|
||||
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| value |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| minimum |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| maximum |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| fuzz |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| flat |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| resolution |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
|
||||
|
||||
Event structure
|
||||
---------------
|
||||
|
||||
The majority of an revent recording will be made up of the input events that were
|
||||
recorded. The event stream is prefixed with the number of events in the stream.
|
||||
|
||||
Each event entry structured as follows:
|
||||
|
||||
* An unsigned integer representing which device from the list of device paths
|
||||
this event is for (zero indexed). E.g. Device ID = 3 would be the 4th
|
||||
device in the list of device paths.
|
||||
* A signed integer representing the number of seconds since "epoch" when the
|
||||
event was recorded.
|
||||
* A signed integer representing the microseconds part of the timestamp.
|
||||
* An unsigned integer representing the event type
|
||||
* An unsigned integer representing the event code
|
||||
* An unsigned integer representing the event value
|
||||
|
||||
For more information about the event type, code and value please read:
|
||||
https://www.kernel.org/doc/Documentation/input/event-codes.txt
|
||||
|
||||
::
|
||||
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Device ID | Timestamp Seconds |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Timestamp Seconds (cont.) |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Timestamp Seconds (cont.) | stamp Micoseconds |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Timestamp Micoseconds (cont.) |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Timestamp Micoseconds (cont.) | Event Type |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Event Code | Event Value |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
| Event Value (cont.) |
|
||||
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|
||||
|
||||
|
||||
Parser
|
||||
------
|
||||
|
||||
WA has a parser for revent recordings. This can be used to work with revent
|
||||
recordings in scripts. Here is an example:
|
||||
|
||||
.. code:: python
|
||||
|
||||
from wlauto.utils.revent import ReventRecording
|
||||
|
||||
with ReventRecording('/path/to/recording.revent') as recording:
|
||||
print "Recording: {}".format(recording.filepath)
|
||||
print "There are {} input events".format(recording.num_events)
|
||||
print "Over a total of {} seconds".format(recording.duration)
|
||||
|
@@ -1,3 +1,5 @@
|
||||
.. _writing_extensions:
|
||||
|
||||
==================
|
||||
Writing Extensions
|
||||
==================
|
||||
@@ -9,7 +11,7 @@ interesting of these are
|
||||
can be benchmarks, high-level use cases, or pretty much anything else.
|
||||
:devices: These are interfaces to the physical devices (development boards or end-user
|
||||
devices, such as smartphones) that use cases run on. Typically each model of a
|
||||
physical device would require it's own interface class (though some functionality
|
||||
physical device would require its own interface class (though some functionality
|
||||
may be reused by subclassing from an existing base).
|
||||
:instruments: Instruments allow collecting additional data from workload execution (e.g.
|
||||
system traces). Instruments are not specific to a particular Workload. Instruments
|
||||
@@ -29,7 +31,7 @@ Extension Basics
|
||||
================
|
||||
|
||||
This sub-section covers things common to implementing extensions of all types.
|
||||
It is recommended you familiarize yourself with the information here before
|
||||
It is recommended you familiarize yourself with the information here before
|
||||
proceeding onto guidance for specific extension types.
|
||||
|
||||
To create an extension, you basically subclass an appropriate base class and them
|
||||
@@ -39,22 +41,22 @@ The Context
|
||||
-----------
|
||||
|
||||
The majority of methods in extensions accept a context argument. This is an
|
||||
instance of :class:`wlauto.core.execution.ExecutionContext`. If contains
|
||||
instance of :class:`wlauto.core.execution.ExecutionContext`. If contains
|
||||
of information about current state of execution of WA and keeps track of things
|
||||
like which workload is currently running and the current iteration.
|
||||
|
||||
Notable attributes of the context are
|
||||
|
||||
context.spec
|
||||
context.spec
|
||||
the current workload specification being executed. This is an
|
||||
instance of :class:`wlauto.core.configuration.WorkloadRunSpec`
|
||||
and defines the workload and the parameters under which it is
|
||||
being executed.
|
||||
being executed.
|
||||
|
||||
context.workload
|
||||
context.workload
|
||||
``Workload`` object that is currently being executed.
|
||||
|
||||
context.current_iteration
|
||||
context.current_iteration
|
||||
The current iteration of the spec that is being executed. Note that this
|
||||
is the iteration for that spec, i.e. the number of times that spec has
|
||||
been run, *not* the total number of all iterations have been executed so
|
||||
@@ -77,9 +79,9 @@ In addition to these, context also defines a few useful paths (see below).
|
||||
Paths
|
||||
-----
|
||||
|
||||
You should avoid using hard-coded absolute paths in your extensions whenever
|
||||
You should avoid using hard-coded absolute paths in your extensions whenever
|
||||
possible, as they make your code too dependent on a particular environment and
|
||||
may mean having to make adjustments when moving to new (host and/or device)
|
||||
may mean having to make adjustments when moving to new (host and/or device)
|
||||
platforms. To help avoid hard-coded absolute paths, WA automation defines
|
||||
a number of standard locations. You should strive to define your paths relative
|
||||
to one of those.
|
||||
@@ -93,7 +95,7 @@ extension methods.
|
||||
context.run_output_directory
|
||||
This is the top-level output directory for all WA results (by default,
|
||||
this will be "wa_output" in the directory in which WA was invoked.
|
||||
|
||||
|
||||
context.output_directory
|
||||
This is the output directory for the current iteration. This will an
|
||||
iteration-specific subdirectory under the main results location. If
|
||||
@@ -102,7 +104,7 @@ context.output_directory
|
||||
|
||||
context.host_working_directory
|
||||
This an addition location that may be used by extensions to store
|
||||
non-iteration specific intermediate files (e.g. configuration).
|
||||
non-iteration specific intermediate files (e.g. configuration).
|
||||
|
||||
Additionally, the global ``wlauto.settings`` object exposes on other location:
|
||||
|
||||
@@ -130,12 +132,63 @@ device, the ``os.path`` modules should *not* be used for on-device path
|
||||
manipulation. Instead device has an equipment module exposed through
|
||||
``device.path`` attribute. This has all the same attributes and behaves the
|
||||
same way as ``os.path``, but is guaranteed to produce valid paths for the device,
|
||||
irrespective of the host's path notation.
|
||||
irrespective of the host's path notation. For example:
|
||||
|
||||
.. code:: python
|
||||
|
||||
result_file = self.device.path.join(self.device.working_directory, "result.txt")
|
||||
self.command = "{} -a -b -c {}".format(target_binary, result_file)
|
||||
|
||||
.. note:: result processors, unlike workloads and instruments, do not have their
|
||||
own device attribute; however they can access the device through the
|
||||
context.
|
||||
|
||||
Deploying executables to a device
|
||||
---------------------------------
|
||||
|
||||
Some devices may have certain restrictions on where executable binaries may be
|
||||
placed and how they should be invoked. To ensure your extension works with as
|
||||
wide a range of devices as possible, you should use WA APIs for deploying and
|
||||
invoking executables on a device, as outlined below.
|
||||
|
||||
As with other resources (see :ref:`resources`) , host-side paths to the exectuable
|
||||
binary to be deployed should be obtained via the resource resolver. A special
|
||||
resource type, ``Executable`` is used to identify a binary to be deployed.
|
||||
This is simiar to the regular ``File`` resource, however it takes an additional
|
||||
parameter that specifies the ABI for which executable was compiled.
|
||||
|
||||
In order for the binary to be obtained in this way, it must be stored in one of
|
||||
the locations scanned by the resource resolver in a directry structure
|
||||
``<root>/bin/<abi>/<binary>`` (where ``root`` is the base resource location to
|
||||
be searched, e.g. ``~/.workload_automation/depencencies/<extension name>``, and
|
||||
``<abi>`` is the ABI for which the exectuable has been compiled, as returned by
|
||||
``self.device.abi``).
|
||||
|
||||
Once the path to the host-side binary has been obtained, it may be deployed using
|
||||
one of two methods of a ``Device`` instace -- ``install`` or ``install_if_needed``.
|
||||
The latter will check a version of that binary has been perviously deployed by
|
||||
WA and will not try to re-install.
|
||||
|
||||
.. code:: python
|
||||
|
||||
from wlauto import Executable
|
||||
|
||||
host_binary = context.resolver.get(Executable(self, self.device.abi, 'some_binary'))
|
||||
target_binary = self.device.install_if_needed(host_binary)
|
||||
|
||||
|
||||
.. note:: Please also note that the check is done based solely on the binary name.
|
||||
For more information please see: :func:`wlauto.common.linux.BaseLinuxDevice.install_if_needed`
|
||||
|
||||
Both of the above methods will return the path to the installed binary on the
|
||||
device. The executable should be invoked *only* via that path; do **not** assume
|
||||
that it will be in ``PATH`` on the target (or that the executable with the same
|
||||
name in ``PATH`` is the version deployed by WA.
|
||||
|
||||
.. code:: python
|
||||
|
||||
self.command = "{} -a -b -c".format(target_binary)
|
||||
self.device.execute(self.command)
|
||||
|
||||
Parameters
|
||||
----------
|
||||
@@ -186,11 +239,11 @@ mandatory
|
||||
and there really is no sensible default that could be given
|
||||
(e.g. something like login credentials), should you consider
|
||||
making it mandatory.
|
||||
|
||||
|
||||
constraint
|
||||
This is an additional constraint to be enforced on the parameter beyond
|
||||
its type or fixed allowed values set. This should be a predicate (a function
|
||||
that takes a single argument -- the user-supplied value -- and returns
|
||||
its type or fixed allowed values set. This should be a predicate (a function
|
||||
that takes a single argument -- the user-supplied value -- and returns
|
||||
a ``bool`` indicating whether the constraint has been satisfied).
|
||||
|
||||
override
|
||||
@@ -199,7 +252,7 @@ override
|
||||
with the same name as already exists, you will get an error. If you do
|
||||
want to override a parameter from further up in the inheritance
|
||||
hierarchy, you can indicate that by setting ``override`` attribute to
|
||||
``True``.
|
||||
``True``.
|
||||
|
||||
When overriding, you do not need to specify every other attribute of the
|
||||
parameter, just the ones you what to override. Values for the rest will
|
||||
@@ -220,7 +273,7 @@ surrounding environment (e.g. that the device has been initialized).
|
||||
|
||||
The contract for ``validate`` method is that it should raise an exception
|
||||
(either ``wlauto.exceptions.ConfigError`` or extension-specific exception type -- see
|
||||
further on this page) if some validation condition has not, and cannot, been met.
|
||||
further on this page) if some validation condition has not, and cannot, been met.
|
||||
If the method returns without raising an exception, then the extension is in a
|
||||
valid internal state.
|
||||
|
||||
@@ -240,7 +293,7 @@ everything it is doing, so you shouldn't need to add much additional logging in
|
||||
your expansion's. But you might what to log additional information, e.g.
|
||||
what settings your extension is using, what it is doing on the host, etc.
|
||||
Operations on the host will not normally be logged, so your extension should
|
||||
definitely log what it is doing on the host. One situation in particular where
|
||||
definitely log what it is doing on the host. One situation in particular where
|
||||
you should add logging is before doing something that might take a significant amount
|
||||
of time, such as downloading a file.
|
||||
|
||||
@@ -257,7 +310,7 @@ Subsequent paragraphs (separated by blank lines) can then provide a more
|
||||
detailed description, including any limitations and setup instructions.
|
||||
|
||||
For parameters, the description is passed as an argument on creation. Please
|
||||
note that if ``default``, ``allowed_values``, or ``constraint``, are set in the
|
||||
note that if ``default``, ``allowed_values``, or ``constraint``, are set in the
|
||||
parameter, they do not need to be explicitly mentioned in the description (wa
|
||||
documentation utilities will automatically pull those). If the ``default`` is set
|
||||
in ``validate`` or additional cross-parameter constraints exist, this *should*
|
||||
@@ -302,7 +355,7 @@ Utils
|
||||
Workload Automation defines a number of utilities collected under
|
||||
:mod:`wlauto.utils` subpackage. These utilities were created to help with the
|
||||
implementation of the framework itself, but may be also be useful when
|
||||
implementing extensions.
|
||||
implementing extensions.
|
||||
|
||||
|
||||
Adding a Workload
|
||||
@@ -324,22 +377,31 @@ The Workload class defines the following interface::
|
||||
|
||||
def init_resources(self, context):
|
||||
pass
|
||||
|
||||
def setup(self, context):
|
||||
raise NotImplementedError()
|
||||
|
||||
def run(self, context):
|
||||
raise NotImplementedError()
|
||||
|
||||
def update_result(self, context):
|
||||
raise NotImplementedError()
|
||||
|
||||
def teardown(self, context):
|
||||
raise NotImplementedError()
|
||||
|
||||
def validate(self):
|
||||
pass
|
||||
|
||||
def initialize(self, context):
|
||||
pass
|
||||
|
||||
def setup(self, context):
|
||||
pass
|
||||
|
||||
def setup(self, context):
|
||||
pass
|
||||
|
||||
def run(self, context):
|
||||
pass
|
||||
|
||||
def update_result(self, context):
|
||||
pass
|
||||
|
||||
def teardown(self, context):
|
||||
pass
|
||||
|
||||
def finalize(self, context):
|
||||
pass
|
||||
|
||||
.. note:: Please see :doc:`conventions` section for notes on how to interpret
|
||||
this.
|
||||
|
||||
@@ -348,8 +410,23 @@ The interface should be implemented as follows
|
||||
:name: This identifies the workload (e.g. it used to specify it in the
|
||||
agenda_.
|
||||
:init_resources: This method may be optionally override to implement dynamic
|
||||
resource discovery for the workload.
|
||||
**Added in version 2.1.3**
|
||||
resource discovery for the workload. This method executes
|
||||
early on, before the device has been initialized, so it
|
||||
should only be used to initialize resources that do not
|
||||
depend on the device to resolve. This method is executed
|
||||
once per run for each workload instance.
|
||||
:validate: This method can be used to validate any assumptions your workload
|
||||
makes about the environment (e.g. that required files are
|
||||
present, environment variables are set, etc) and should raise
|
||||
a :class:`wlauto.exceptions.WorkloadError` if that is not the
|
||||
case. The base class implementation only makes sure sure that
|
||||
the name attribute has been set.
|
||||
:initialize: This method will be executed exactly once per run (no matter
|
||||
how many instances of the workload there are). It will run
|
||||
after the device has been initialized, so it may be used to
|
||||
perform device-dependent initialization that does not need to
|
||||
be repeated on each iteration (e.g. as installing executables
|
||||
required by the workload on the device).
|
||||
:setup: Everything that needs to be in place for workload execution should
|
||||
be done in this method. This includes copying files to the device,
|
||||
starting up an application, configuring communications channels,
|
||||
@@ -371,13 +448,11 @@ The interface should be implemented as follows
|
||||
to the result (see below).
|
||||
:teardown: This could be used to perform any cleanup you may wish to do,
|
||||
e.g. Uninstalling applications, deleting file on the device, etc.
|
||||
:finalize: This is the complement to ``initialize``. This will be executed
|
||||
exactly once at the end of the run. This should be used to
|
||||
perform any final clean up (e.g. uninstalling binaries installed
|
||||
in the ``initialize``).
|
||||
|
||||
:validate: This method can be used to validate any assumptions your workload
|
||||
makes about the environment (e.g. that required files are
|
||||
present, environment variables are set, etc) and should raise
|
||||
a :class:`wlauto.exceptions.WorkloadError` if that is not the
|
||||
case. The base class implementation only makes sure sure that
|
||||
the name attribute has been set.
|
||||
|
||||
.. _agenda: agenda.html
|
||||
|
||||
@@ -512,17 +587,17 @@ device name(case sensitive) then followed by a dot '.' then the stage name
|
||||
then '.revent'. All your custom revent files should reside at
|
||||
'~/.workload_automation/dependencies/WORKLOAD NAME/'. These are the current
|
||||
supported stages:
|
||||
|
||||
|
||||
:setup: This stage is where the game is loaded. It is a good place to
|
||||
record revent here to modify the game settings and get it ready
|
||||
to start.
|
||||
:run: This stage is where the game actually starts. This will allow for
|
||||
more accurate results if the revent file for this stage only
|
||||
records the game being played.
|
||||
|
||||
|
||||
For instance, to add a custom revent files for a device named mydevice and
|
||||
a workload name mygame, you create a new directory called mygame in
|
||||
'~/.workload_automation/dependencies/'. Then you add the revent files for
|
||||
a workload name mygame, you create a new directory called mygame in
|
||||
'~/.workload_automation/dependencies/'. Then you add the revent files for
|
||||
the stages you want in ~/.workload_automation/dependencies/mygame/::
|
||||
|
||||
mydevice.setup.revent
|
||||
@@ -531,7 +606,7 @@ the stages you want in ~/.workload_automation/dependencies/mygame/::
|
||||
Any revent file in the dependencies will always overwrite the revent file in the
|
||||
workload directory. So it is possible for example to just provide one revent for
|
||||
setup in the dependencies and use the run.revent that is in the workload directory.
|
||||
|
||||
|
||||
Adding an Instrument
|
||||
====================
|
||||
|
||||
@@ -576,7 +651,7 @@ which is perhaps ``initialize`` that gets invoked after the device has been
|
||||
initialised for the first time, and can be used to perform one-time setup (e.g.
|
||||
copying files to the device -- there is no point in doing that for each
|
||||
iteration). The full list of available methods can be found in
|
||||
:ref:`Signals Documentation <instrument_name_mapping>`.
|
||||
:ref:`Signals Documentation <instrumentation_method_map>`.
|
||||
|
||||
|
||||
Prioritization
|
||||
@@ -727,19 +802,19 @@ table::
|
||||
with open(outfile, 'w') as wfh:
|
||||
write_table(rows, wfh)
|
||||
|
||||
|
||||
|
||||
Adding a Resource Getter
|
||||
========================
|
||||
|
||||
A resource getter is a new extension type added in version 2.1.3. A resource
|
||||
getter implement a method of acquiring resources of a particular type (such as
|
||||
APK files or additional workload assets). Resource getters are invoked in
|
||||
priority order until one returns the desired resource.
|
||||
priority order until one returns the desired resource.
|
||||
|
||||
If you want WA to look for resources somewhere it doesn't by default (e.g. you
|
||||
have a repository of APK files), you can implement a getter for the resource and
|
||||
register it with a higher priority than the standard WA getters, so that it gets
|
||||
invoked first.
|
||||
invoked first.
|
||||
|
||||
Instances of a resource getter should implement the following interface::
|
||||
|
||||
@@ -751,7 +826,7 @@ Instances of a resource getter should implement the following interface::
|
||||
|
||||
def get(self, resource, **kwargs):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
The getter should define a name (as with all extensions), a resource
|
||||
type, which should be a string, e.g. ``'jar'``, and a priority (see `Getter
|
||||
Prioritization`_ below). In addition, ``get`` method should be implemented. The
|
||||
@@ -823,7 +898,7 @@ looks for the file under
|
||||
elif not found_files:
|
||||
return None
|
||||
else:
|
||||
raise ResourceError('More than one .apk found in {} for {}.'.format(resource_dir,
|
||||
raise ResourceError('More than one .apk found in {} for {}.'.format(resource_dir,
|
||||
resource.owner.name))
|
||||
|
||||
.. _adding_a_device:
|
||||
@@ -923,7 +998,7 @@ top-level package directory is created by default, and it is OK to have
|
||||
everything in there.
|
||||
|
||||
.. note:: When discovering extensions thorugh this mechanism, WA traveries the
|
||||
Python module/submodule tree, not the directory strucuter, therefore,
|
||||
Python module/submodule tree, not the directory strucuter, therefore,
|
||||
if you are going to create subdirectories under the top level dictory
|
||||
created for you, it is important that your make sure they are valid
|
||||
Python packages; i.e. each subdirectory must contain a __init__.py
|
||||
@@ -934,7 +1009,7 @@ At this stage, you may want to edit ``params`` structure near the bottom of
|
||||
the ``setup.py`` to add correct author, license and contact information (see
|
||||
"Writing the Setup Script" section in standard Python documentation for
|
||||
details). You may also want to add a README and/or a COPYING file at the same
|
||||
level as the setup.py. Once you have the contents of your package sorted,
|
||||
level as the setup.py. Once you have the contents of your package sorted,
|
||||
you can generate the package by running ::
|
||||
|
||||
cd my_wa_exts
|
||||
|
@@ -16,7 +16,7 @@
|
||||
#
|
||||
[MASTER]
|
||||
|
||||
profile=no
|
||||
#profile=no
|
||||
|
||||
ignore=external
|
||||
|
||||
@@ -41,7 +41,9 @@ ignore=external
|
||||
# https://bitbucket.org/logilab/pylint/issue/272/anomalous-backslash-in-string-for-raw
|
||||
# C0330: bad continuation, due to:
|
||||
# https://bitbucket.org/logilab/pylint/issue/232/wrong-hanging-indentation-false-positive
|
||||
disable=C0301,C0103,C0111,W0142,R0903,R0904,R0922,W0511,W0141,I0011,R0921,W1401,C0330
|
||||
# TODO: disabling no-value-for-parameter and logging-format-interpolation, as they appear to be broken
|
||||
# in version 1.4.1 and return a lot of false postives; should be re-enabled once fixed.
|
||||
disable=C0301,C0103,C0111,W0142,R0903,R0904,R0922,W0511,W0141,I0011,R0921,W1401,C0330,no-value-for-parameter,logging-format-interpolation
|
||||
|
||||
[FORMAT]
|
||||
max-module-lines=4000
|
||||
|
17
scripts/cpustates
Normal file
17
scripts/cpustates
Normal file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
from wlauto.utils.power import main
|
||||
main()
|
@@ -1,17 +1,17 @@
|
||||
#!/bin/bash
|
||||
# $Copyright:
|
||||
# ----------------------------------------------------------------
|
||||
# This confidential and proprietary software may be used only as
|
||||
# authorised by a licensing agreement from ARM Limited
|
||||
# (C) COPYRIGHT 2013 ARM Limited
|
||||
# ALL RIGHTS RESERVED
|
||||
# The entire notice above must be reproduced on all authorised
|
||||
# copies and copies may only be made to the extent permitted
|
||||
# by a licensing agreement from ARM Limited.
|
||||
# ----------------------------------------------------------------
|
||||
# File: create_workload
|
||||
# ----------------------------------------------------------------
|
||||
# $
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
wa create workload $@
|
||||
|
||||
|
@@ -1,16 +1,16 @@
|
||||
#!/bin/bash
|
||||
# $Copyright:
|
||||
# ----------------------------------------------------------------
|
||||
# This confidential and proprietary software may be used only as
|
||||
# authorised by a licensing agreement from ARM Limited
|
||||
# (C) COPYRIGHT 2013 ARM Limited
|
||||
# ALL RIGHTS RESERVED
|
||||
# The entire notice above must be reproduced on all authorised
|
||||
# copies and copies may only be made to the extent permitted
|
||||
# by a licensing agreement from ARM Limited.
|
||||
# ----------------------------------------------------------------
|
||||
# File: list_extensions
|
||||
# ----------------------------------------------------------------
|
||||
# $
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
wa list $@
|
||||
|
@@ -1,17 +1,17 @@
|
||||
#!/bin/bash
|
||||
# $Copyright:
|
||||
# ----------------------------------------------------------------
|
||||
# This confidential and proprietary software may be used only as
|
||||
# authorised by a licensing agreement from ARM Limited
|
||||
# (C) COPYRIGHT 2013 ARM Limited
|
||||
# ALL RIGHTS RESERVED
|
||||
# The entire notice above must be reproduced on all authorised
|
||||
# copies and copies may only be made to the extent permitted
|
||||
# by a licensing agreement from ARM Limited.
|
||||
# ----------------------------------------------------------------
|
||||
# File: run_workloads
|
||||
# ----------------------------------------------------------------
|
||||
# $
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
wa run $@
|
||||
|
||||
|
26
scripts/wa
26
scripts/wa
@@ -1,17 +1,17 @@
|
||||
#!/usr/bin/env python
|
||||
# $Copyright:
|
||||
# ----------------------------------------------------------------
|
||||
# This confidential and proprietary software may be used only as
|
||||
# authorised by a licensing agreement from ARM Limited
|
||||
# (C) COPYRIGHT 2013 ARM Limited
|
||||
# ALL RIGHTS RESERVED
|
||||
# The entire notice above must be reproduced on all authorised
|
||||
# copies and copies may only be made to the extent permitted
|
||||
# by a licensing agreement from ARM Limited.
|
||||
# ----------------------------------------------------------------
|
||||
# File: run_workloads
|
||||
# ----------------------------------------------------------------
|
||||
# $
|
||||
# Copyright 2013-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
from wlauto.core.entry_point import main
|
||||
main()
|
||||
|
12
setup.py
12
setup.py
@@ -23,7 +23,10 @@ try:
|
||||
except ImportError:
|
||||
from distutils.core import setup
|
||||
|
||||
sys.path.insert(0, './wlauto/core/')
|
||||
|
||||
wlauto_dir = os.path.join(os.path.dirname(__file__), 'wlauto')
|
||||
|
||||
sys.path.insert(0, os.path.join(wlauto_dir, 'core'))
|
||||
from version import get_wa_version
|
||||
|
||||
# happends if falling back to distutils
|
||||
@@ -38,7 +41,7 @@ except OSError:
|
||||
packages = []
|
||||
data_files = {}
|
||||
source_dir = os.path.dirname(__file__)
|
||||
for root, dirs, files in os.walk('wlauto'):
|
||||
for root, dirs, files in os.walk(wlauto_dir):
|
||||
rel_dir = os.path.relpath(root, source_dir)
|
||||
data = []
|
||||
if '__init__.py' in files:
|
||||
@@ -63,7 +66,7 @@ params = dict(
|
||||
packages=packages,
|
||||
package_data=data_files,
|
||||
scripts=scripts,
|
||||
url='N/A',
|
||||
url='http://github.com/arm-sowftware/workload-automation',
|
||||
license='Apache v2',
|
||||
maintainer='ARM Architecture & Technology Device Lab',
|
||||
maintainer_email='workload-automation@arm.com',
|
||||
@@ -73,11 +76,14 @@ params = dict(
|
||||
'pyserial', # Serial port interface
|
||||
'colorama', # Printing with colors
|
||||
'pyYAML', # YAML-formatted agenda parsing
|
||||
'requests', # Fetch assets over HTTP
|
||||
],
|
||||
extras_require={
|
||||
'other': ['jinja2', 'pandas>=0.13.1'],
|
||||
'statedetect': ['numpy', 'imutils', 'opencv-python'],
|
||||
'test': ['nose'],
|
||||
'mongodb': ['pymongo'],
|
||||
'notify': ['notify2'],
|
||||
'doc': ['sphinx'],
|
||||
},
|
||||
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||
|
@@ -29,7 +29,7 @@ from wlauto.common.linux.device import LinuxDevice # NOQA
|
||||
from wlauto.common.android.device import AndroidDevice, BigLittleDevice # NOQA
|
||||
from wlauto.common.android.resources import ApkFile, JarFile
|
||||
from wlauto.common.android.workload import (UiAutomatorWorkload, ApkWorkload, AndroidBenchmark, # NOQA
|
||||
AndroidUiAutoBenchmark, GameWorkload) # NOQA
|
||||
AndroidUiAutoBenchmark, AndroidUxPerfWorkload, GameWorkload) # NOQA
|
||||
|
||||
from wlauto.core.version import get_wa_version
|
||||
|
||||
|
@@ -1,6 +1,6 @@
|
||||
# This agenda specifies configuration that may be used for regression runs
|
||||
# on big.LITTLE systems. This agenda will with a TC2 device configured as
|
||||
# described in the documentation.
|
||||
# on big.LITTLE systems. This agenda will work with a TC2 device configured
|
||||
# as described in the documentation.
|
||||
config:
|
||||
device: tc2
|
||||
run_name: big.LITTLE_regression
|
||||
@@ -69,7 +69,7 @@ workloads:
|
||||
- id: b10
|
||||
name: smartbench
|
||||
- id: b11
|
||||
name: sqlite
|
||||
name: sqlitebm
|
||||
- id: b12
|
||||
name: vellamo
|
||||
|
||||
|
@@ -12,5 +12,3 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
|
@@ -15,15 +15,20 @@
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import string
|
||||
import textwrap
|
||||
import argparse
|
||||
import shutil
|
||||
import getpass
|
||||
import subprocess
|
||||
from collections import OrderedDict
|
||||
|
||||
import yaml
|
||||
|
||||
from wlauto import ExtensionLoader, Command, settings
|
||||
from wlauto.exceptions import CommandError
|
||||
from wlauto.exceptions import CommandError, ConfigError
|
||||
from wlauto.utils.cli import init_argument_parser
|
||||
from wlauto.utils.misc import (capitalize, check_output,
|
||||
ensure_file_directory_exists as _f, ensure_directory_exists as _d)
|
||||
@@ -169,15 +174,105 @@ class CreatePackageSubcommand(CreateSubcommand):
|
||||
touch(os.path.join(actual_package_path, '__init__.py'))
|
||||
|
||||
|
||||
class CreateAgendaSubcommand(CreateSubcommand):
|
||||
|
||||
name = 'agenda'
|
||||
description = """
|
||||
Create an agenda whith the specified extensions enabled. And parameters set to their
|
||||
default values.
|
||||
"""
|
||||
|
||||
def initialize(self):
|
||||
self.parser.add_argument('extensions', nargs='+',
|
||||
help='Extensions to be added')
|
||||
self.parser.add_argument('-i', '--iterations', type=int, default=1,
|
||||
help='Sets the number of iterations for all workloads')
|
||||
self.parser.add_argument('-r', '--include-runtime-params', action='store_true',
|
||||
help="""
|
||||
Adds runtime parameters to the global section of the generated
|
||||
agenda. Note: these do not have default values, so only name
|
||||
will be added. Also, runtime parameters are devices-specific, so
|
||||
a device must be specified (either in the list of extensions,
|
||||
or in the existing config).
|
||||
""")
|
||||
self.parser.add_argument('-o', '--output', metavar='FILE',
|
||||
help='Output file. If not specfied, STDOUT will be used instead.')
|
||||
|
||||
def execute(self, args): # pylint: disable=no-self-use,too-many-branches,too-many-statements
|
||||
loader = ExtensionLoader(packages=settings.extension_packages,
|
||||
paths=settings.extension_paths)
|
||||
agenda = OrderedDict()
|
||||
agenda['config'] = OrderedDict(instrumentation=[], result_processors=[])
|
||||
agenda['global'] = OrderedDict(iterations=args.iterations)
|
||||
agenda['workloads'] = []
|
||||
device = None
|
||||
device_config = None
|
||||
for name in args.extensions:
|
||||
extcls = loader.get_extension_class(name)
|
||||
config = loader.get_default_config(name)
|
||||
del config['modules']
|
||||
|
||||
if extcls.kind == 'workload':
|
||||
entry = OrderedDict()
|
||||
entry['name'] = extcls.name
|
||||
if name != extcls.name:
|
||||
entry['label'] = name
|
||||
entry['params'] = config
|
||||
agenda['workloads'].append(entry)
|
||||
elif extcls.kind == 'device':
|
||||
if device is not None:
|
||||
raise ConfigError('Specifying multiple devices: {} and {}'.format(device.name, name))
|
||||
device = extcls
|
||||
device_config = config
|
||||
agenda['config']['device'] = name
|
||||
agenda['config']['device_config'] = config
|
||||
else:
|
||||
if extcls.kind == 'instrument':
|
||||
agenda['config']['instrumentation'].append(name)
|
||||
if extcls.kind == 'result_processor':
|
||||
agenda['config']['result_processors'].append(name)
|
||||
agenda['config'][name] = config
|
||||
|
||||
if args.include_runtime_params:
|
||||
if not device:
|
||||
if settings.device:
|
||||
device = loader.get_extension_class(settings.device)
|
||||
device_config = loader.get_default_config(settings.device)
|
||||
else:
|
||||
raise ConfigError('-r option requires for a device to be in the list of extensions')
|
||||
rps = OrderedDict()
|
||||
for rp in device.runtime_parameters:
|
||||
if hasattr(rp, 'get_runtime_parameters'):
|
||||
# a core parameter needs to be expanded for each of the
|
||||
# device's cores, if they're avialable
|
||||
for crp in rp.get_runtime_parameters(device_config.get('core_names', [])):
|
||||
rps[crp.name] = None
|
||||
else:
|
||||
rps[rp.name] = None
|
||||
agenda['global']['runtime_params'] = rps
|
||||
|
||||
if args.output:
|
||||
wfh = open(args.output, 'w')
|
||||
else:
|
||||
wfh = sys.stdout
|
||||
yaml.dump(agenda, wfh, indent=4, default_flow_style=False)
|
||||
if args.output:
|
||||
wfh.close()
|
||||
|
||||
|
||||
class CreateCommand(Command):
|
||||
|
||||
name = 'create'
|
||||
description = '''Used to create various WA-related objects (see positional arguments list for what
|
||||
objects may be created).\n\nUse "wa create <object> -h" for object-specific arguments.'''
|
||||
formatter_class = argparse.RawDescriptionHelpFormatter
|
||||
subcmd_classes = [CreateWorkloadSubcommand, CreatePackageSubcommand]
|
||||
subcmd_classes = [
|
||||
CreateWorkloadSubcommand,
|
||||
CreatePackageSubcommand,
|
||||
CreateAgendaSubcommand,
|
||||
]
|
||||
|
||||
def initialize(self):
|
||||
def initialize(self, context):
|
||||
subparsers = self.parser.add_subparsers(dest='what')
|
||||
self.subcommands = [] # pylint: disable=W0201
|
||||
for subcmd_cls in self.subcmd_classes:
|
||||
@@ -257,7 +352,12 @@ def create_uiauto_project(path, name, target='1'):
|
||||
package_name,
|
||||
target,
|
||||
path)
|
||||
check_output(command, shell=True)
|
||||
try:
|
||||
check_output(command, shell=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
if 'is is not valid' in e.output:
|
||||
message = 'No Android SDK target found; have you run "{} update sdk" and download a platform?'
|
||||
raise CommandError(message.format(android_path))
|
||||
|
||||
build_script = os.path.join(path, 'build.sh')
|
||||
with open(build_script, 'w') as wfh:
|
||||
@@ -296,5 +396,5 @@ def render_template(name, params):
|
||||
|
||||
|
||||
def touch(path):
|
||||
with open(path, 'w') as wfh: # pylint: disable=unused-variable
|
||||
with open(path, 'w') as _:
|
||||
pass
|
||||
|
122
wlauto/commands/get_assets.py
Normal file
122
wlauto/commands/get_assets.py
Normal file
@@ -0,0 +1,122 @@
|
||||
# Copyright 2014-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from requests import ConnectionError, RequestException
|
||||
|
||||
from wlauto import File, ExtensionLoader, Command, settings
|
||||
from wlauto.core.extension import Extension
|
||||
|
||||
|
||||
REMOTE_ASSETS_URL = 'https://github.com/ARM-software/workload-automation-assets/raw/master/dependencies'
|
||||
|
||||
|
||||
class GetAssetsCommand(Command):
|
||||
name = 'get-assets'
|
||||
description = '''
|
||||
This command downloads external extension dependencies used by Workload Automation.
|
||||
Works by first downloading a directory index of the assets, then iterating through
|
||||
it to get assets for the specified extensions.
|
||||
'''
|
||||
|
||||
# Uses config setting if available otherwise defaults to ARM-software repo
|
||||
# Can be overriden with the --url argument
|
||||
assets_url = settings.remote_assets_url or REMOTE_ASSETS_URL
|
||||
|
||||
def initialize(self, context):
|
||||
self.parser.add_argument('-f', '--force', action='store_true',
|
||||
help='Always fetch the assets, even if matching versions exist in local cache.')
|
||||
self.parser.add_argument('--url', metavar='URL', type=not_empty, default=self.assets_url,
|
||||
help='''The location from which to download the files. If not provided,
|
||||
config setting ``remote_assets_url`` will be used if available, else
|
||||
uses the default REMOTE_ASSETS_URL parameter in the script.''')
|
||||
group = self.parser.add_mutually_exclusive_group(required=True)
|
||||
group.add_argument('-a', '--all', action='store_true',
|
||||
help='Download assets for all extensions found in the index. Cannot be used with -e.')
|
||||
group.add_argument('-e', dest='exts', metavar='EXT', nargs='+', type=not_empty,
|
||||
help='One or more extensions whose assets to download. Cannot be used with --all.')
|
||||
|
||||
def execute(self, args):
|
||||
self.logger.debug('Program arguments: {}'.format(vars(args)))
|
||||
if args.force:
|
||||
self.logger.info('Force-download of assets requested')
|
||||
if not args.url:
|
||||
self.logger.debug('URL not provided, falling back to default setting in config')
|
||||
self.logger.info('Downloading external assets from {}'.format(args.url))
|
||||
|
||||
# Get file index of assets
|
||||
ext_loader = ExtensionLoader(packages=settings.extension_packages, paths=settings.extension_paths)
|
||||
getter = ext_loader.get_resource_getter('http_assets', None, url=args.url, always_fetch=args.force)
|
||||
try:
|
||||
getter.index = getter.fetch_index()
|
||||
except (ConnectionError, RequestException) as e:
|
||||
self.exit_with_error(str(e))
|
||||
all_assets = dict()
|
||||
for k, v in getter.index.iteritems():
|
||||
all_assets[str(k)] = [str(asset['path']) for asset in v]
|
||||
|
||||
# Here we get a list of all extensions present in the current WA installation,
|
||||
# and cross-check that against the list of extensions whose assets are requested.
|
||||
# The aim is to avoid downloading assets for extensions that do not exist, since
|
||||
# WA extensions and asset index can be updated independently and go out of sync.
|
||||
all_extensions = [ext.name for ext in ext_loader.list_extensions()]
|
||||
assets_to_get = set(all_assets).intersection(all_extensions)
|
||||
if args.exts:
|
||||
assets_to_get = assets_to_get.intersection(args.exts)
|
||||
# Check list is not empty
|
||||
if not assets_to_get:
|
||||
if args.all:
|
||||
self.exit_with_error('Could not find extensions: {}'.format(', '.join(all_assets.keys())))
|
||||
else: # args.exts
|
||||
self.exit_with_error('Asset index has no entries for: {}'.format(', '.join(args.exts)))
|
||||
|
||||
# Check out of sync extensions i.e. do not exist in both WA and assets index
|
||||
missing = set(all_assets).difference(all_extensions) | set(args.exts or []).difference(all_assets)
|
||||
if missing:
|
||||
self.logger.warning('Not getting assets for missing extensions: {}'.format(', '.join(missing)))
|
||||
|
||||
# Ideally the extension loader would be used to instantiate, but it does full
|
||||
# validation of the extension, like checking connected devices or supported
|
||||
# platform(s). This info might be unavailable and is not required to download
|
||||
# assets, since they are classified by extension name alone. So instead we use
|
||||
# a simple subclass of ``Extension`` providing a valid ``name`` attribute.
|
||||
for ext_name in assets_to_get:
|
||||
owner = _instantiate(NamedExtension, ext_name)
|
||||
self.logger.info('Getting assets for: {}'.format(ext_name))
|
||||
for asset in all_assets[ext_name]:
|
||||
getter.get(File(owner, asset)) # Download the files
|
||||
|
||||
def exit_with_error(self, message, code=1):
|
||||
self.logger.error(message)
|
||||
sys.exit(code)
|
||||
|
||||
|
||||
class NamedExtension(Extension):
|
||||
def __init__(self, name, **kwargs):
|
||||
super(NamedExtension, self).__init__(**kwargs)
|
||||
self.name = name
|
||||
|
||||
|
||||
def not_empty(val):
|
||||
if val:
|
||||
return val
|
||||
else:
|
||||
raise argparse.ArgumentTypeError('Extension name cannot be blank')
|
||||
|
||||
|
||||
def _instantiate(cls, *args, **kwargs):
|
||||
return cls(*args, **kwargs)
|
@@ -24,22 +24,33 @@ class ListCommand(Command):
|
||||
name = 'list'
|
||||
description = 'List available WA extensions with a short description of each.'
|
||||
|
||||
def initialize(self):
|
||||
def initialize(self, context):
|
||||
extension_types = ['{}s'.format(ext.name) for ext in settings.extensions]
|
||||
self.parser.add_argument('kind', metavar='KIND',
|
||||
help=('Specify the kind of extension to list. Must be '
|
||||
'one of: {}'.format(', '.join(extension_types))),
|
||||
choices=extension_types)
|
||||
self.parser.add_argument('-n', '--name', help='Filter results by the name specified')
|
||||
self.parser.add_argument('-o', '--packaged-only', action='store_true',
|
||||
help='''
|
||||
Only list extensions packaged with WA itself. Do not list extensions
|
||||
installed locally or from other packages.
|
||||
''')
|
||||
self.parser.add_argument('-p', '--platform', help='Only list results that are supported by '
|
||||
'the specified platform')
|
||||
|
||||
def execute(self, args):
|
||||
filters = {}
|
||||
if args.name:
|
||||
filters['name'] = args.name
|
||||
|
||||
ext_loader = ExtensionLoader(packages=settings.extension_packages, paths=settings.extension_paths)
|
||||
if args.packaged_only:
|
||||
ext_loader = ExtensionLoader()
|
||||
else:
|
||||
ext_loader = ExtensionLoader(packages=settings.extension_packages,
|
||||
paths=settings.extension_paths)
|
||||
results = ext_loader.list_extensions(args.kind[:-1])
|
||||
if filters:
|
||||
if filters or args.platform:
|
||||
filtered_results = []
|
||||
for result in results:
|
||||
passed = True
|
||||
@@ -47,6 +58,8 @@ class ListCommand(Command):
|
||||
if getattr(result, k) != v:
|
||||
passed = False
|
||||
break
|
||||
if passed and args.platform:
|
||||
passed = check_platform(result, args.platform)
|
||||
if passed:
|
||||
filtered_results.append(result)
|
||||
else: # no filters specified
|
||||
@@ -57,3 +70,10 @@ class ListCommand(Command):
|
||||
for result in sorted(filtered_results, key=lambda x: x.name):
|
||||
output.add_item(get_summary(result), result.name)
|
||||
print output.format_data()
|
||||
|
||||
|
||||
def check_platform(extension, platform):
|
||||
supported_platforms = getattr(extension, 'supported_platforms', [])
|
||||
if supported_platforms:
|
||||
return platform in supported_platforms
|
||||
return True
|
||||
|
211
wlauto/commands/record.py
Normal file
211
wlauto/commands/record.py
Normal file
@@ -0,0 +1,211 @@
|
||||
# Copyright 2014-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import signal
|
||||
from math import ceil
|
||||
|
||||
from wlauto import ExtensionLoader, Command, settings
|
||||
from wlauto.common.resources import Executable
|
||||
from wlauto.core.resource import NO_ONE
|
||||
from wlauto.core.resolver import ResourceResolver
|
||||
from wlauto.core.configuration import RunConfiguration
|
||||
from wlauto.core.agenda import Agenda
|
||||
from wlauto.utils.revent import ReventRecording, GAMEPAD_MODE
|
||||
|
||||
|
||||
class ReventCommand(Command):
|
||||
|
||||
# Validate command options
|
||||
def validate_args(self, args):
|
||||
if args.clear and not args.package:
|
||||
print "Package must be specified if you want to clear cache\n"
|
||||
self.parser.print_help()
|
||||
sys.exit()
|
||||
|
||||
# pylint: disable=W0201
|
||||
def execute(self, args):
|
||||
self.validate_args(args)
|
||||
self.logger.info("Connecting to device...")
|
||||
|
||||
ext_loader = ExtensionLoader(packages=settings.extension_packages,
|
||||
paths=settings.extension_paths)
|
||||
|
||||
# Setup config
|
||||
self.config = RunConfiguration(ext_loader)
|
||||
for filepath in settings.get_config_paths():
|
||||
self.config.load_config(filepath)
|
||||
self.config.set_agenda(Agenda())
|
||||
self.config.finalize()
|
||||
|
||||
context = LightContext(self.config)
|
||||
|
||||
# Setup device
|
||||
self.device = ext_loader.get_device(settings.device, **settings.device_config)
|
||||
self.device.validate()
|
||||
self.device.dynamic_modules = []
|
||||
self.device.connect()
|
||||
self.device.initialize(context)
|
||||
|
||||
host_binary = context.resolver.get(Executable(NO_ONE, self.device.abi, 'revent'))
|
||||
self.target_binary = self.device.install_executable(host_binary)
|
||||
|
||||
self.run(args)
|
||||
|
||||
def run(self, args):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class RecordCommand(ReventCommand):
|
||||
|
||||
name = 'record'
|
||||
description = '''Performs a revent recording
|
||||
|
||||
This command helps making revent recordings. It will automatically
|
||||
deploy revent and even has the option of automatically opening apps.
|
||||
|
||||
Revent allows you to record raw inputs such as screen swipes or button presses.
|
||||
This can be useful for recording inputs for workloads such as games that don't
|
||||
have XML UI layouts that can be used with UIAutomator. As a drawback from this,
|
||||
revent recordings are specific to the device type they were recorded on.
|
||||
|
||||
WA uses two parts to the names of revent recordings in the format,
|
||||
{device_name}.{suffix}.revent.
|
||||
|
||||
- device_name can either be specified manually with the ``-d`` argument or
|
||||
it can be automatically determined. On Android device it will be obtained
|
||||
from ``build.prop``, on Linux devices it is obtained from ``/proc/device-tree/model``.
|
||||
- suffix is used by WA to determine which part of the app execution the
|
||||
recording is for, currently these are either ``setup`` or ``run``. This
|
||||
should be specified with the ``-s`` argument.
|
||||
|
||||
|
||||
**gamepad recording**
|
||||
|
||||
revent supports an alternative recording mode, where it will record events
|
||||
from a single gamepad device. In this mode, revent will store the
|
||||
description of this device as a part of the recording. When replaying such
|
||||
a recording, revent will first create a virtual gamepad using the
|
||||
description, and will replay the events into it, so a physical controller
|
||||
does not need to be connected on replay. Unlike standard revent recordings,
|
||||
recordings generated in this mode should be (to an extent) portable across
|
||||
different devices.
|
||||
|
||||
note:
|
||||
|
||||
- The device on which a recording is being made in gamepad mode, must have
|
||||
exactly one gamepad connected to it.
|
||||
- The device on which a gamepad recording is being replayed must have
|
||||
/dev/uinput enabled in the kernel (this interface is necessary to create
|
||||
virtual gamepad).
|
||||
|
||||
'''
|
||||
|
||||
def initialize(self, context):
|
||||
self.context = context
|
||||
self.parser.add_argument('-d', '--device', help='The name of the device')
|
||||
self.parser.add_argument('-s', '--suffix', help='The suffix of the revent file, e.g. ``setup``')
|
||||
self.parser.add_argument('-o', '--output', help='Directory to save the recording in')
|
||||
self.parser.add_argument('-p', '--package', help='Package to launch before recording')
|
||||
self.parser.add_argument('-g', '--gamepad', help='Record from a gamepad rather than all devices.',
|
||||
action="store_true")
|
||||
self.parser.add_argument('-C', '--clear', help='Clear app cache before launching it',
|
||||
action="store_true")
|
||||
self.parser.add_argument('-S', '--capture-screen', help='Record a screen capture after recording',
|
||||
action="store_true")
|
||||
|
||||
def run(self, args):
|
||||
if args.device:
|
||||
device_name = args.device
|
||||
else:
|
||||
device_name = self.device.get_device_model()
|
||||
|
||||
if args.suffix:
|
||||
args.suffix += "."
|
||||
|
||||
revent_file = self.device.path.join(self.device.working_directory,
|
||||
'{}.{}revent'.format(device_name, args.suffix or ""))
|
||||
|
||||
if args.clear:
|
||||
self.device.execute("pm clear {}".format(args.package))
|
||||
|
||||
if args.package:
|
||||
self.logger.info("Starting {}".format(args.package))
|
||||
self.device.execute('monkey -p {} -c android.intent.category.LAUNCHER 1'.format(args.package))
|
||||
|
||||
self.logger.info("Press Enter when you are ready to record...")
|
||||
raw_input("")
|
||||
gamepad_flag = '-g ' if args.gamepad else ''
|
||||
command = "{} record {}-s {}".format(self.target_binary, gamepad_flag, revent_file)
|
||||
self.device.kick_off(command)
|
||||
|
||||
self.logger.info("Press Enter when you have finished recording...")
|
||||
raw_input("")
|
||||
if args.capture_screen:
|
||||
self.logger.info("Recording screen capture")
|
||||
self.device.capture_screen(args.output or os.getcwdu())
|
||||
self.device.killall("revent", signal.SIGINT)
|
||||
self.logger.info("Waiting for revent to finish")
|
||||
while self.device.get_pids_of("revent"):
|
||||
pass
|
||||
self.logger.info("Pulling files from device")
|
||||
self.device.pull_file(revent_file, args.output or os.getcwdu())
|
||||
|
||||
|
||||
class ReplayCommand(ReventCommand):
|
||||
|
||||
name = 'replay'
|
||||
description = '''Replay a revent recording
|
||||
|
||||
Revent allows you to record raw inputs such as screen swipes or button presses.
|
||||
See ``wa show record`` to see how to make an revent recording.
|
||||
'''
|
||||
|
||||
def initialize(self, context):
|
||||
self.context = context
|
||||
self.parser.add_argument('revent', help='The name of the file to replay')
|
||||
self.parser.add_argument('-p', '--package', help='Package to launch before recording')
|
||||
self.parser.add_argument('-C', '--clear', help='Clear app cache before launching it',
|
||||
action="store_true")
|
||||
|
||||
# pylint: disable=W0201
|
||||
def run(self, args):
|
||||
self.logger.info("Pushing file to device")
|
||||
self.device.push_file(args.revent, self.device.working_directory)
|
||||
revent_file = self.device.path.join(self.device.working_directory, os.path.split(args.revent)[1])
|
||||
|
||||
if args.clear:
|
||||
self.device.execute("pm clear {}".format(args.package))
|
||||
|
||||
if args.package:
|
||||
self.logger.info("Starting {}".format(args.package))
|
||||
self.device.execute('monkey -p {} -c android.intent.category.LAUNCHER 1'.format(args.package))
|
||||
|
||||
self.logger.info("Replaying recording")
|
||||
command = "{} replay {}".format(self.target_binary, revent_file)
|
||||
recording = ReventRecording(args.revent)
|
||||
timeout = ceil(recording.duration) + 30
|
||||
recording.close()
|
||||
self.device.execute(command, timeout=timeout,
|
||||
as_root=(recording.mode == GAMEPAD_MODE))
|
||||
self.logger.info("Finished replay")
|
||||
|
||||
|
||||
# Used to satisfy the API
|
||||
class LightContext(object):
|
||||
def __init__(self, config):
|
||||
self.resolver = ResourceResolver(config)
|
||||
self.resolver.load()
|
@@ -20,6 +20,7 @@ import shutil
|
||||
|
||||
import wlauto
|
||||
from wlauto import Command, settings
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.core.agenda import Agenda
|
||||
from wlauto.core.execution import Executor
|
||||
from wlauto.utils.log import add_log_file
|
||||
@@ -30,24 +31,43 @@ class RunCommand(Command):
|
||||
name = 'run'
|
||||
description = 'Execute automated workloads on a remote device and process the resulting output.'
|
||||
|
||||
def initialize(self):
|
||||
def initialize(self, context):
|
||||
self.parser.add_argument('agenda', metavar='AGENDA',
|
||||
help='Agenda for this workload automation run. This defines which workloads will ' +
|
||||
'be executed, how many times, with which tunables, etc. ' +
|
||||
'See example agendas in {} '.format(os.path.dirname(wlauto.__file__)) +
|
||||
'for an example of how this file should be structured.')
|
||||
help="""
|
||||
Agenda for this workload automation run. This defines which
|
||||
workloads will be executed, how many times, with which
|
||||
tunables, etc. See example agendas in {} for an example of
|
||||
how this file should be structured.
|
||||
""".format(os.path.dirname(wlauto.__file__)))
|
||||
self.parser.add_argument('-d', '--output-directory', metavar='DIR', default=None,
|
||||
help='Specify a directory where the output will be generated. If the directory' +
|
||||
'already exists, the script will abort unless -f option (see below) is used,' +
|
||||
'in which case the contents of the directory will be overwritten. If this option' +
|
||||
'is not specified, then {} will be used instead.'.format(settings.output_directory))
|
||||
help="""
|
||||
Specify a directory where the output will be generated. If
|
||||
the directory already exists, the script will abort unless -f
|
||||
option (see below) is used, in which case the contents of the
|
||||
directory will be overwritten. If this option is not specified,
|
||||
then {} will be used instead.
|
||||
""".format(settings.output_directory))
|
||||
self.parser.add_argument('-f', '--force', action='store_true',
|
||||
help='Overwrite output directory if it exists. By default, the script will abort in this' +
|
||||
'situation to prevent accidental data loss.')
|
||||
help="""
|
||||
Overwrite output directory if it exists. By default, the script
|
||||
will abort in this situation to prevent accidental data loss.
|
||||
""")
|
||||
self.parser.add_argument('-i', '--id', action='append', dest='only_run_ids', metavar='ID',
|
||||
help='Specify a workload spec ID from an agenda to run. If this is specified, only that particular ' +
|
||||
'spec will be run, and other workloads in the agenda will be ignored. This option may be used to ' +
|
||||
'specify multiple IDs.')
|
||||
help="""
|
||||
Specify a workload spec ID from an agenda to run. If this is
|
||||
specified, only that particular spec will be run, and other
|
||||
workloads in the agenda will be ignored. This option may be
|
||||
used to specify multiple IDs.
|
||||
""")
|
||||
self.parser.add_argument('--disable', action='append', dest='instruments_to_disable',
|
||||
metavar='INSTRUMENT', help="""
|
||||
Specify an instrument to disable from the command line. This
|
||||
equivalent to adding "~{metavar}" to the instrumentation list in
|
||||
the agenda. This can be used to temporarily disable a troublesome
|
||||
instrument for a particular run without introducing permanent
|
||||
change to the config (which one might then forget to revert).
|
||||
This option may be specified multiple times.
|
||||
""")
|
||||
|
||||
def execute(self, args): # NOQA
|
||||
self.set_up_output_directory(args)
|
||||
@@ -57,14 +77,28 @@ class RunCommand(Command):
|
||||
agenda = Agenda(args.agenda)
|
||||
settings.agenda = args.agenda
|
||||
shutil.copy(args.agenda, settings.meta_directory)
|
||||
|
||||
if len(agenda.workloads) == 0:
|
||||
raise ConfigError("No workloads specified")
|
||||
elif '.' in args.agenda or os.sep in args.agenda:
|
||||
raise ConfigError('Agenda "{}" does not exist.'.format(args.agenda))
|
||||
else:
|
||||
self.logger.debug('{} is not a file; assuming workload name.'.format(args.agenda))
|
||||
agenda = Agenda()
|
||||
agenda.add_workload_entry(args.agenda)
|
||||
|
||||
file_name = 'config_{}.py'
|
||||
if args.instruments_to_disable:
|
||||
if 'instrumentation' not in agenda.config:
|
||||
agenda.config['instrumentation'] = []
|
||||
for itd in args.instruments_to_disable:
|
||||
self.logger.debug('Updating agenda to disable {}'.format(itd))
|
||||
agenda.config['instrumentation'].append('~{}'.format(itd))
|
||||
|
||||
basename = 'config_'
|
||||
for file_number, path in enumerate(settings.get_config_paths(), 1):
|
||||
shutil.copy(path, os.path.join(settings.meta_directory, file_name.format(file_number)))
|
||||
file_ext = os.path.splitext(path)[1]
|
||||
shutil.copy(path, os.path.join(settings.meta_directory,
|
||||
basename + str(file_number) + file_ext))
|
||||
|
||||
executor = Executor()
|
||||
executor.execute(agenda, selectors={'ids': args.only_run_ids})
|
||||
|
@@ -18,11 +18,11 @@ import sys
|
||||
import subprocess
|
||||
from cStringIO import StringIO
|
||||
|
||||
from terminalsize import get_terminal_size # pylint: disable=import-error
|
||||
from wlauto import Command, ExtensionLoader, settings
|
||||
from wlauto.utils.doc import (get_summary, get_description, get_type_name, format_column, format_body,
|
||||
format_paragraph, indent, strip_inlined_text)
|
||||
from wlauto.utils.misc import get_pager
|
||||
from wlauto.utils.terminalsize import get_terminal_size
|
||||
|
||||
|
||||
class ShowCommand(Command):
|
||||
@@ -33,12 +33,13 @@ class ShowCommand(Command):
|
||||
Display documentation for the specified extension (workload, instrument, etc.).
|
||||
"""
|
||||
|
||||
def initialize(self):
|
||||
def initialize(self, context):
|
||||
self.parser.add_argument('name', metavar='EXTENSION',
|
||||
help='''The name of the extension for which information will
|
||||
be shown.''')
|
||||
|
||||
def execute(self, args):
|
||||
# pylint: disable=unpacking-non-sequence
|
||||
ext_loader = ExtensionLoader(packages=settings.extension_packages, paths=settings.extension_paths)
|
||||
extension = ext_loader.get_extension_class(args.name)
|
||||
out = StringIO()
|
||||
@@ -47,8 +48,12 @@ class ShowCommand(Command):
|
||||
text = out.getvalue()
|
||||
pager = get_pager()
|
||||
if len(text.split('\n')) > term_height and pager:
|
||||
sp = subprocess.Popen(pager, stdin=subprocess.PIPE)
|
||||
sp.communicate(text)
|
||||
try:
|
||||
sp = subprocess.Popen(pager, stdin=subprocess.PIPE)
|
||||
sp.communicate(text)
|
||||
except OSError:
|
||||
self.logger.warning('Could not use PAGER "{}"'.format(pager))
|
||||
sys.stdout.write(text)
|
||||
else:
|
||||
sys.stdout.write(text)
|
||||
|
||||
@@ -58,6 +63,9 @@ def format_extension(extension, out, width):
|
||||
out.write('\n')
|
||||
format_extension_summary(extension, out, width)
|
||||
out.write('\n')
|
||||
if hasattr(extension, 'supported_platforms'):
|
||||
format_supported_platforms(extension, out, width)
|
||||
out.write('\n')
|
||||
if extension.parameters:
|
||||
format_extension_parameters(extension, out, width)
|
||||
out.write('\n')
|
||||
@@ -72,6 +80,11 @@ def format_extension_summary(extension, out, width):
|
||||
out.write('{}\n'.format(format_body(strip_inlined_text(get_summary(extension)), width)))
|
||||
|
||||
|
||||
def format_supported_platforms(extension, out, width):
|
||||
text = 'supported on: {}'.format(', '.join(extension.supported_platforms))
|
||||
out.write('{}\n'.format(format_body(text, width)))
|
||||
|
||||
|
||||
def format_extension_description(extension, out, width):
|
||||
# skip the initial paragraph of multi-paragraph description, as already
|
||||
# listed above.
|
||||
@@ -93,9 +106,8 @@ def format_extension_parameters(extension, out, width, shift=4):
|
||||
param_text += indent('allowed values: {}\n'.format(', '.join(map(str, param.allowed_values))))
|
||||
elif param.constraint:
|
||||
param_text += indent('constraint: {}\n'.format(get_type_name(param.constraint)))
|
||||
if param.default:
|
||||
if param.default is not None:
|
||||
param_text += indent('default: {}\n'.format(param.default))
|
||||
param_texts.append(indent(param_text, shift))
|
||||
|
||||
out.write(format_column('\n'.join(param_texts), width))
|
||||
|
||||
|
@@ -14,7 +14,7 @@ class ${class_name}(AndroidBenchmark):
|
||||
|
||||
parameters = [
|
||||
# Workload parameters go here e.g.
|
||||
Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
Parameter('example_parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
description='This is an example parameter')
|
||||
]
|
||||
|
||||
|
@@ -14,7 +14,7 @@ class ${class_name}(AndroidUiAutoBenchmark):
|
||||
|
||||
parameters = [
|
||||
# Workload parameters go here e.g.
|
||||
Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
Parameter('example_parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
description='This is an example parameter')
|
||||
]
|
||||
|
||||
|
@@ -8,7 +8,7 @@ class ${class_name}(Workload):
|
||||
|
||||
parameters = [
|
||||
# Workload parameters go here e.g.
|
||||
Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
Parameter('example_parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
description='This is an example parameter')
|
||||
]
|
||||
|
||||
|
@@ -8,7 +8,7 @@ class ${class_name}(UiAutomatorWorkload):
|
||||
|
||||
parameters = [
|
||||
# Workload parameters go here e.g.
|
||||
Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
Parameter('example_parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
|
||||
description='This is an example parameter')
|
||||
]
|
||||
|
||||
|
@@ -12,5 +12,3 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
|
BIN
wlauto/common/android/BaseUiAutomation$1.class
Normal file
BIN
wlauto/common/android/BaseUiAutomation$1.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/BaseUiAutomation$ActionLogger.class
Normal file
BIN
wlauto/common/android/BaseUiAutomation$ActionLogger.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/BaseUiAutomation$Direction.class
Normal file
BIN
wlauto/common/android/BaseUiAutomation$Direction.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/BaseUiAutomation$FindByCriteria.class
Normal file
BIN
wlauto/common/android/BaseUiAutomation$FindByCriteria.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/BaseUiAutomation$PinchType.class
Normal file
BIN
wlauto/common/android/BaseUiAutomation$PinchType.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/BaseUiAutomation$ScreenOrientation.class
Normal file
BIN
wlauto/common/android/BaseUiAutomation$ScreenOrientation.class
Normal file
Binary file not shown.
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$1.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$1.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$ActionLogger.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$ActionLogger.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$Direction.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$Direction.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$GestureTestParams.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$GestureTestParams.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$GestureType.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$GestureType.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$PinchType.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$PinchType.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$SurfaceLogger.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$SurfaceLogger.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$Timer.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$Timer.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation$UxPerfLogger.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation$UxPerfLogger.class
Normal file
Binary file not shown.
BIN
wlauto/common/android/UxPerfUiAutomation.class
Normal file
BIN
wlauto/common/android/UxPerfUiAutomation.class
Normal file
Binary file not shown.
@@ -12,5 +12,3 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
|
@@ -21,18 +21,23 @@ import time
|
||||
import tempfile
|
||||
import shutil
|
||||
import threading
|
||||
import json
|
||||
import xml.dom.minidom
|
||||
from subprocess import CalledProcessError
|
||||
|
||||
from wlauto.core.extension import Parameter
|
||||
from wlauto.common.linux.device import BaseLinuxDevice
|
||||
from wlauto.common.resources import Executable
|
||||
from wlauto.core.resource import NO_ONE
|
||||
from wlauto.common.linux.device import BaseLinuxDevice, PsEntry
|
||||
from wlauto.exceptions import DeviceError, WorkerThreadError, TimeoutError, DeviceNotRespondingError
|
||||
from wlauto.utils.misc import convert_new_lines
|
||||
from wlauto.utils.misc import convert_new_lines, ABI_MAP
|
||||
from wlauto.utils.types import boolean, regex
|
||||
from wlauto.utils.android import (adb_shell, adb_background_shell, adb_list_devices,
|
||||
adb_command, AndroidProperties, ANDROID_VERSION_MAP)
|
||||
|
||||
|
||||
SCREEN_STATE_REGEX = re.compile('(?:mPowerState|mScreenOn)=([0-9]+|true|false)', re.I)
|
||||
SCREEN_STATE_REGEX = re.compile('(?:mPowerState|mScreenOn|Display Power: state)=([0-9]+|true|false|ON|OFF)', re.I)
|
||||
SCREEN_SIZE_REGEX = re.compile(r'mUnrestrictedScreen=\(\d+,\d+\)\s+(?P<width>\d+)x(?P<height>\d+)')
|
||||
|
||||
|
||||
class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
@@ -46,11 +51,10 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
parameters = [
|
||||
Parameter('adb_name',
|
||||
description='The unique ID of the device as output by "adb devices".'),
|
||||
Parameter('android_prompt', kind=regex, default=re.compile('^.*(shell|root)@.*:/ [#$] ', re.MULTILINE),
|
||||
Parameter('android_prompt', kind=regex, default=re.compile('^.*(shell|root)@.*:/\S* [#$] ', re.MULTILINE),
|
||||
description='The format of matching the shell prompt in Android.'),
|
||||
Parameter('working_directory', default='/sdcard/wa-working',
|
||||
description='Directory that will be used WA on the device for output files etc.'),
|
||||
Parameter('binaries_directory', default='/system/bin',
|
||||
Parameter('working_directory', default='/sdcard/wa-working', override=True),
|
||||
Parameter('binaries_directory', default='/data/local/tmp/wa-bin', override=True,
|
||||
description='Location of binaries on the device.'),
|
||||
Parameter('package_data_directory', default='/data/data',
|
||||
description='Location of of data for an installed package (APK).'),
|
||||
@@ -71,6 +75,12 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
Specified whether the device should make sure that the screen is on
|
||||
during initialization.
|
||||
"""),
|
||||
Parameter('swipe_to_unlock', kind=str, default=None,
|
||||
allowed_values=[None, "horizontal", "vertical"],
|
||||
description="""
|
||||
If set a swipe of the specified direction will be performed.
|
||||
This should unlock the screen.
|
||||
"""),
|
||||
]
|
||||
|
||||
default_timeout = 30
|
||||
@@ -98,19 +108,34 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
|
||||
@property
|
||||
def abi(self):
|
||||
return self.getprop()['ro.product.cpu.abi'].split('-')[0]
|
||||
val = self.getprop()['ro.product.cpu.abi'].split('-')[0]
|
||||
for abi, architectures in ABI_MAP.iteritems():
|
||||
if val in architectures:
|
||||
return abi
|
||||
return val
|
||||
|
||||
@property
|
||||
def supported_eabi(self):
|
||||
def supported_abi(self):
|
||||
props = self.getprop()
|
||||
result = [props['ro.product.cpu.abi']]
|
||||
if 'ro.product.cpu.abi2' in props:
|
||||
result.append(props['ro.product.cpu.abi2'])
|
||||
if 'ro.product.cpu.abilist' in props:
|
||||
for eabi in props['ro.product.cpu.abilist'].split(','):
|
||||
if eabi not in result:
|
||||
result.append(eabi)
|
||||
return result
|
||||
for abi in props['ro.product.cpu.abilist'].split(','):
|
||||
if abi not in result:
|
||||
result.append(abi)
|
||||
|
||||
mapped_result = []
|
||||
for supported_abi in result:
|
||||
for abi, architectures in ABI_MAP.iteritems():
|
||||
found = False
|
||||
if supported_abi in architectures and abi not in mapped_result:
|
||||
mapped_result.append(abi)
|
||||
found = True
|
||||
break
|
||||
if not found and supported_abi not in mapped_result:
|
||||
mapped_result.append(supported_abi)
|
||||
return mapped_result
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(AndroidDevice, self).__init__(**kwargs)
|
||||
@@ -126,8 +151,11 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
self._is_ready = False
|
||||
self._just_rebooted = True
|
||||
|
||||
def boot(self, **kwargs):
|
||||
self.reset()
|
||||
def boot(self, hard=False, **kwargs):
|
||||
if hard:
|
||||
self.hard_reset()
|
||||
else:
|
||||
self.reset()
|
||||
|
||||
def connect(self): # NOQA pylint: disable=R0912
|
||||
iteration_number = 0
|
||||
@@ -155,7 +183,7 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
raise DeviceError('Could not boot {} ({}).'.format(self.name, self.adb_name))
|
||||
|
||||
while iteration_number < max_iterations:
|
||||
available = (1 == int('0' + adb_shell(self.adb_name, 'getprop sys.boot_completed', timeout=self.default_timeout)))
|
||||
available = (int('0' + (adb_shell(self.adb_name, 'getprop sys.boot_completed', timeout=self.default_timeout))) == 1)
|
||||
if available:
|
||||
break
|
||||
else:
|
||||
@@ -182,18 +210,13 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
self._just_rebooted = False
|
||||
self._is_ready = True
|
||||
|
||||
def initialize(self, context, *args, **kwargs):
|
||||
self.execute('mkdir -p {}'.format(self.working_directory))
|
||||
def initialize(self, context):
|
||||
self.sqlite = self.deploy_sqlite3(context) # pylint: disable=attribute-defined-outside-init
|
||||
if self.is_rooted:
|
||||
if not self.executable_is_installed('busybox'):
|
||||
self.busybox = self.deploy_busybox(context)
|
||||
else:
|
||||
self.busybox = 'busybox'
|
||||
self.disable_screen_lock()
|
||||
self.disable_selinux()
|
||||
if self.enable_screen_check:
|
||||
self.ensure_screen_is_on()
|
||||
self.init(context, *args, **kwargs)
|
||||
|
||||
def disconnect(self):
|
||||
if self._logcat_poller:
|
||||
@@ -231,7 +254,8 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
.. note:: This will get reset on userdata erasure.
|
||||
|
||||
"""
|
||||
return self.execute('settings get secure android_id').strip()
|
||||
output = self.execute('content query --uri content://settings/secure --projection value --where "name=\'android_id\'"').strip()
|
||||
return output.split('value=')[-1]
|
||||
|
||||
def get_sdk_version(self):
|
||||
try:
|
||||
@@ -253,6 +277,24 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
return line.split('=', 1)[1]
|
||||
return None
|
||||
|
||||
def get_installed_package_abi(self, package):
|
||||
"""
|
||||
Returns the primary abi of the specified package if it is installed
|
||||
on the device, or ``None`` otherwise.
|
||||
"""
|
||||
output = self.execute('dumpsys package {}'.format(package))
|
||||
val = None
|
||||
for line in convert_new_lines(output).split('\n'):
|
||||
if 'primaryCpuAbi' in line:
|
||||
val = line.split('=', 1)[1]
|
||||
break
|
||||
if val == 'null':
|
||||
return None
|
||||
for abi, architectures in ABI_MAP.iteritems():
|
||||
if val in architectures:
|
||||
return abi
|
||||
return val
|
||||
|
||||
def list_packages(self):
|
||||
"""
|
||||
List packages installed on the device.
|
||||
@@ -274,11 +316,24 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
"""
|
||||
return package_name in self.list_packages()
|
||||
|
||||
def executable_is_installed(self, executable_name):
|
||||
return executable_name in self.listdir(self.binaries_directory)
|
||||
def executable_is_installed(self, executable_name): # pylint: disable=unused-argument,no-self-use
|
||||
raise AttributeError("""Instead of using is_installed, please use
|
||||
``get_binary_path`` or ``install_if_needed`` instead. You should
|
||||
use the path returned by these functions to then invoke the binary
|
||||
|
||||
please see: https://pythonhosted.org/wlauto/writing_extensions.html""")
|
||||
|
||||
def is_installed(self, name):
|
||||
return self.executable_is_installed(name) or self.package_is_installed(name)
|
||||
if self.package_is_installed(name):
|
||||
return True
|
||||
elif "." in name: # assumes android packages have a . in their name and binaries documentation
|
||||
return False
|
||||
else:
|
||||
raise AttributeError("""Instead of using is_installed, please use
|
||||
``get_binary_path`` or ``install_if_needed`` instead. You should
|
||||
use the path returned by these functions to then invoke the binary
|
||||
|
||||
please see: https://pythonhosted.org/wlauto/writing_extensions.html""")
|
||||
|
||||
def listdir(self, path, as_root=False, **kwargs):
|
||||
contents = self.execute('ls {}'.format(path), as_root=as_root)
|
||||
@@ -290,13 +345,16 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
|
||||
"""
|
||||
self._check_ready()
|
||||
if not as_root:
|
||||
adb_command(self.adb_name, "push '{}' '{}'".format(source, dest), timeout=timeout)
|
||||
else:
|
||||
device_tempfile = self.path.join(self.file_transfer_cache, source.lstrip(self.path.sep))
|
||||
self.execute('mkdir -p {}'.format(self.path.dirname(device_tempfile)))
|
||||
adb_command(self.adb_name, "push '{}' '{}'".format(source, device_tempfile), timeout=timeout)
|
||||
self.execute('cp {} {}'.format(device_tempfile, dest), as_root=True)
|
||||
try:
|
||||
if not as_root:
|
||||
adb_command(self.adb_name, "push '{}' '{}'".format(source, dest), timeout=timeout)
|
||||
else:
|
||||
device_tempfile = self.path.join(self.file_transfer_cache, source.lstrip(self.path.sep))
|
||||
self.execute('mkdir -p {}'.format(self.path.dirname(device_tempfile)))
|
||||
adb_command(self.adb_name, "push '{}' '{}'".format(source, device_tempfile), timeout=timeout)
|
||||
self.execute('cp {} {}'.format(device_tempfile, dest), as_root=True)
|
||||
except CalledProcessError as e:
|
||||
raise DeviceError(e)
|
||||
|
||||
def pull_file(self, source, dest, as_root=False, timeout=default_timeout): # pylint: disable=W0221
|
||||
"""
|
||||
@@ -304,45 +362,53 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
|
||||
"""
|
||||
self._check_ready()
|
||||
if not as_root:
|
||||
adb_command(self.adb_name, "pull '{}' '{}'".format(source, dest), timeout=timeout)
|
||||
else:
|
||||
device_tempfile = self.path.join(self.file_transfer_cache, source.lstrip(self.path.sep))
|
||||
self.execute('mkdir -p {}'.format(self.path.dirname(device_tempfile)))
|
||||
self.execute('cp {} {}'.format(source, device_tempfile), as_root=True)
|
||||
adb_command(self.adb_name, "pull '{}' '{}'".format(device_tempfile, dest), timeout=timeout)
|
||||
try:
|
||||
if not as_root:
|
||||
adb_command(self.adb_name, "pull '{}' '{}'".format(source, dest), timeout=timeout)
|
||||
else:
|
||||
device_tempfile = self.path.join(self.file_transfer_cache, source.lstrip(self.path.sep))
|
||||
self.execute('mkdir -p {}'.format(self.path.dirname(device_tempfile)))
|
||||
self.execute('cp {} {}'.format(source, device_tempfile), as_root=True)
|
||||
adb_command(self.adb_name, "pull '{}' '{}'".format(device_tempfile, dest), timeout=timeout)
|
||||
except CalledProcessError as e:
|
||||
raise DeviceError(e)
|
||||
|
||||
def delete_file(self, filepath, as_root=False): # pylint: disable=W0221
|
||||
self._check_ready()
|
||||
adb_shell(self.adb_name, "rm '{}'".format(filepath), as_root=as_root, timeout=self.default_timeout)
|
||||
adb_shell(self.adb_name, "rm -rf '{}'".format(filepath), as_root=as_root, timeout=self.default_timeout)
|
||||
|
||||
def file_exists(self, filepath):
|
||||
self._check_ready()
|
||||
output = adb_shell(self.adb_name, 'if [ -e \'{}\' ]; then echo 1; else echo 0; fi'.format(filepath),
|
||||
timeout=self.default_timeout)
|
||||
if int(output):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return bool(int(output))
|
||||
|
||||
def install(self, filepath, timeout=default_timeout, with_name=None): # pylint: disable=W0221
|
||||
def install(self, filepath, timeout=default_timeout, with_name=None, replace=False): # pylint: disable=W0221
|
||||
ext = os.path.splitext(filepath)[1].lower()
|
||||
if ext == '.apk':
|
||||
return self.install_apk(filepath, timeout)
|
||||
return self.install_apk(filepath, timeout, replace)
|
||||
else:
|
||||
return self.install_executable(filepath, with_name)
|
||||
|
||||
def install_apk(self, filepath, timeout=default_timeout): # pylint: disable=W0221
|
||||
def install_apk(self, filepath, timeout=default_timeout, replace=False, allow_downgrade=False): # pylint: disable=W0221
|
||||
self._check_ready()
|
||||
ext = os.path.splitext(filepath)[1].lower()
|
||||
if ext == '.apk':
|
||||
return adb_command(self.adb_name, "install {}".format(filepath), timeout=timeout)
|
||||
flags = []
|
||||
if replace:
|
||||
flags.append('-r') # Replace existing APK
|
||||
if allow_downgrade:
|
||||
flags.append('-d') # Install the APK even if a newer version is already installed
|
||||
if self.get_sdk_version() >= 23:
|
||||
flags.append('-g') # Grant all runtime permissions
|
||||
self.logger.debug("Replace APK = {}, ADB flags = '{}'".format(replace, ' '.join(flags)))
|
||||
return adb_command(self.adb_name, "install {} '{}'".format(' '.join(flags), filepath), timeout=timeout)
|
||||
else:
|
||||
raise DeviceError('Can\'t install {}: unsupported format.'.format(filepath))
|
||||
|
||||
def install_executable(self, filepath, with_name=None):
|
||||
"""
|
||||
Installs a binary executable on device. Requires root access. Returns
|
||||
Installs a binary executable on device. Returns
|
||||
the path to the installed binary, or ``None`` if the installation has failed.
|
||||
Optionally, ``with_name`` parameter may be used to specify a different name under
|
||||
which the executable will be installed.
|
||||
@@ -351,24 +417,14 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
Updated in version 2.1.5 with ``with_name`` parameter.
|
||||
|
||||
"""
|
||||
self._ensure_binaries_directory_is_writable()
|
||||
executable_name = with_name or os.path.basename(filepath)
|
||||
on_device_file = self.path.join(self.working_directory, executable_name)
|
||||
on_device_executable = self.path.join(self.binaries_directory, executable_name)
|
||||
self.push_file(filepath, on_device_file)
|
||||
matched = []
|
||||
for entry in self.list_file_systems():
|
||||
if self.binaries_directory.rstrip('/').startswith(entry.mount_point):
|
||||
matched.append(entry)
|
||||
|
||||
if matched:
|
||||
entry = sorted(matched, key=lambda x: len(x.mount_point))[-1]
|
||||
if 'rw' not in entry.options:
|
||||
self.execute('mount -o rw,remount {} {}'.format(entry.device, entry.mount_point), as_root=True)
|
||||
self.execute('cp {} {}'.format(on_device_file, on_device_executable), as_root=True)
|
||||
self.execute('chmod 0777 {}'.format(on_device_executable), as_root=True)
|
||||
return on_device_executable
|
||||
else:
|
||||
raise DeviceError('Could not find mount point for binaries directory {}'.format(self.binaries_directory))
|
||||
self.execute('cp {} {}'.format(on_device_file, on_device_executable), as_root=self.is_rooted)
|
||||
self.execute('chmod 0777 {}'.format(on_device_executable), as_root=self.is_rooted)
|
||||
return on_device_executable
|
||||
|
||||
def uninstall(self, package):
|
||||
self._check_ready()
|
||||
@@ -376,17 +432,15 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
|
||||
def uninstall_executable(self, executable_name):
|
||||
"""
|
||||
Requires root access.
|
||||
|
||||
Added in version 2.1.3.
|
||||
|
||||
"""
|
||||
on_device_executable = self.path.join(self.binaries_directory, executable_name)
|
||||
for entry in self.list_file_systems():
|
||||
if entry.mount_point == '/system':
|
||||
if 'rw' not in entry.options:
|
||||
self.execute('mount -o rw,remount {} /system'.format(entry.device), as_root=True)
|
||||
self.delete_file(on_device_executable)
|
||||
on_device_executable = self.get_binary_path(executable_name, search_system_binaries=False)
|
||||
if not on_device_executable:
|
||||
raise DeviceError("Could not uninstall {}, binary not found".format(on_device_executable))
|
||||
self._ensure_binaries_directory_is_writable()
|
||||
self.delete_file(on_device_executable, as_root=self.is_rooted)
|
||||
|
||||
def execute(self, command, timeout=default_timeout, check_exit_code=True, background=False,
|
||||
as_root=False, busybox=False, **kwargs):
|
||||
@@ -408,7 +462,7 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
|
||||
Added in version 2.1.3
|
||||
|
||||
.. note:: The device must be rooted to be able to use busybox.
|
||||
.. note:: The device must be rooted to be able to use some busybox features.
|
||||
|
||||
:param as_root: If ``True``, will attempt to execute command in privileged mode. The device
|
||||
must be rooted, otherwise an error will be raised. Defaults to ``False``.
|
||||
@@ -427,51 +481,85 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
if as_root and not self.is_rooted:
|
||||
raise DeviceError('Attempting to execute "{}" as root on unrooted device.'.format(command))
|
||||
if busybox:
|
||||
if not self.is_rooted:
|
||||
DeviceError('Attempting to execute "{}" with busybox. '.format(command) +
|
||||
'Busybox can only be deployed to rooted devices.')
|
||||
command = ' '.join([self.busybox, command])
|
||||
if background:
|
||||
return adb_background_shell(self.adb_name, command, as_root=as_root)
|
||||
else:
|
||||
return adb_shell(self.adb_name, command, timeout, check_exit_code, as_root)
|
||||
|
||||
def kick_off(self, command):
|
||||
def kick_off(self, command, as_root=None):
|
||||
"""
|
||||
Like execute but closes adb session and returns immediately, leaving the command running on the
|
||||
device (this is different from execute(background=True) which keeps adb connection open and returns
|
||||
a subprocess object).
|
||||
|
||||
.. note:: This relies on busybox's nohup applet and so won't work on unrooted devices.
|
||||
|
||||
Added in version 2.1.4
|
||||
|
||||
"""
|
||||
if not self.is_rooted:
|
||||
raise DeviceError('kick_off uses busybox\'s nohup applet and so can only be run a rooted device.')
|
||||
if as_root is None:
|
||||
as_root = self.is_rooted
|
||||
try:
|
||||
command = 'cd {} && busybox nohup {}'.format(self.working_directory, command)
|
||||
output = self.execute(command, timeout=1, as_root=True)
|
||||
command = 'cd {} && {} nohup {}'.format(self.working_directory, self.busybox, command)
|
||||
output = self.execute(command, timeout=1, as_root=as_root)
|
||||
except TimeoutError:
|
||||
pass
|
||||
else:
|
||||
raise ValueError('Background command exited before timeout; got "{}"'.format(output))
|
||||
|
||||
def get_pids_of(self, process_name):
|
||||
"""Returns a list of PIDs of all processes with the specified name."""
|
||||
result = (self.execute('ps | {} grep {}'.format(self.busybox, process_name),
|
||||
check_exit_code=False) or '').strip()
|
||||
if result and 'not found' not in result:
|
||||
return [int(x.split()[1]) for x in result.split('\n')]
|
||||
else:
|
||||
return []
|
||||
|
||||
def ps(self, **kwargs):
|
||||
"""
|
||||
Returns the list of running processes on the device. Keyword arguments may
|
||||
be used to specify simple filters for columns.
|
||||
|
||||
Added in version 2.1.4
|
||||
|
||||
"""
|
||||
lines = iter(convert_new_lines(self.execute('ps')).split('\n'))
|
||||
lines.next() # header
|
||||
result = []
|
||||
for line in lines:
|
||||
parts = line.split()
|
||||
if parts:
|
||||
result.append(PsEntry(*(parts[0:1] + map(int, parts[1:5]) + parts[5:])))
|
||||
if not kwargs:
|
||||
return result
|
||||
else:
|
||||
filtered_result = []
|
||||
for entry in result:
|
||||
if all(getattr(entry, k) == v for k, v in kwargs.iteritems()):
|
||||
filtered_result.append(entry)
|
||||
return filtered_result
|
||||
|
||||
def get_properties(self, context):
|
||||
"""Captures and saves the information from /system/build.prop and /proc/version"""
|
||||
props = super(AndroidDevice, self).get_properties(context)
|
||||
props.update(self._get_android_properties(context))
|
||||
return props
|
||||
|
||||
def _get_android_properties(self, context):
|
||||
props = {}
|
||||
props['android_id'] = self.get_android_id()
|
||||
buildprop_file = os.path.join(context.host_working_directory, 'build.prop')
|
||||
if not os.path.isfile(buildprop_file):
|
||||
self.pull_file('/system/build.prop', context.host_working_directory)
|
||||
self._update_build_properties(buildprop_file, props)
|
||||
context.add_run_artifact('build_properties', buildprop_file, 'export')
|
||||
self._update_build_properties(props)
|
||||
|
||||
version_file = os.path.join(context.host_working_directory, 'version')
|
||||
if not os.path.isfile(version_file):
|
||||
self.pull_file('/proc/version', context.host_working_directory)
|
||||
self._update_versions(version_file, props)
|
||||
context.add_run_artifact('device_version', version_file, 'export')
|
||||
dumpsys_target_file = self.path.join(self.working_directory, 'window.dumpsys')
|
||||
dumpsys_host_file = os.path.join(context.host_working_directory, 'window.dumpsys')
|
||||
self.execute('{} > {}'.format('dumpsys window', dumpsys_target_file))
|
||||
self.pull_file(dumpsys_target_file, dumpsys_host_file)
|
||||
context.add_run_artifact('dumpsys_window', dumpsys_host_file, 'meta')
|
||||
|
||||
prop_file = os.path.join(context.host_working_directory, 'android-props.json')
|
||||
with open(prop_file, 'w') as wfh:
|
||||
json.dump(props, wfh)
|
||||
context.add_run_artifact('android_properties', prop_file, 'export')
|
||||
return props
|
||||
|
||||
def getprop(self, prop=None):
|
||||
@@ -485,6 +573,11 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
return props[prop]
|
||||
return props
|
||||
|
||||
def deploy_sqlite3(self, context):
|
||||
host_file = context.resolver.get(Executable(NO_ONE, self.abi, 'sqlite3'))
|
||||
target_file = self.install_if_needed(host_file)
|
||||
return target_file
|
||||
|
||||
# Android-specific methods. These either rely on specifics of adb or other
|
||||
# Android-only concepts in their interface and/or implementation.
|
||||
|
||||
@@ -528,6 +621,30 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
else:
|
||||
return adb_shell(self.adb_name, 'logcat -c', timeout=self.default_timeout)
|
||||
|
||||
def get_screen_size(self):
|
||||
output = self.execute('dumpsys window')
|
||||
match = SCREEN_SIZE_REGEX.search(output)
|
||||
if match:
|
||||
return (int(match.group('width')),
|
||||
int(match.group('height')))
|
||||
else:
|
||||
return (0, 0)
|
||||
|
||||
def perform_unlock_swipe(self):
|
||||
width, height = self.get_screen_size()
|
||||
command = 'input swipe {} {} {} {}'
|
||||
if self.swipe_to_unlock == "horizontal":
|
||||
swipe_heigh = height * 2 // 3
|
||||
start = 100
|
||||
stop = width - start
|
||||
self.execute(command.format(start, swipe_heigh, stop, swipe_heigh))
|
||||
if self.swipe_to_unlock == "vertical":
|
||||
swipe_middle = height / 2
|
||||
swipe_heigh = height * 2 // 3
|
||||
self.execute(command.format(swipe_middle, swipe_heigh, swipe_middle, 0))
|
||||
else: # Should never reach here
|
||||
raise DeviceError("Invalid swipe direction: {}".format(self.swipe_to_unlock))
|
||||
|
||||
def capture_screen(self, filepath):
|
||||
"""Caputers the current device screen into the specified file in a PNG format."""
|
||||
on_device_file = self.path.join(self.working_directory, 'screen_capture.png')
|
||||
@@ -535,6 +652,17 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
self.pull_file(on_device_file, filepath)
|
||||
self.delete_file(on_device_file)
|
||||
|
||||
def capture_ui_hierarchy(self, filepath):
|
||||
"""Captures the current view hierarchy into the specified file in a XML format."""
|
||||
on_device_file = self.path.join(self.working_directory, 'screen_capture.xml')
|
||||
self.execute('uiautomator dump {}'.format(on_device_file))
|
||||
self.pull_file(on_device_file, filepath)
|
||||
self.delete_file(on_device_file)
|
||||
|
||||
parsed_xml = xml.dom.minidom.parse(filepath)
|
||||
with open(filepath, 'w') as f:
|
||||
f.write(parsed_xml.toprettyxml())
|
||||
|
||||
def is_screen_on(self):
|
||||
"""Returns ``True`` if the device screen is currently on, ``False`` otherwise."""
|
||||
output = self.execute('dumpsys power')
|
||||
@@ -547,6 +675,8 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
def ensure_screen_is_on(self):
|
||||
if not self.is_screen_on():
|
||||
self.execute('input keyevent 26')
|
||||
if self.swipe_to_unlock:
|
||||
self.perform_unlock_swipe()
|
||||
|
||||
def disable_screen_lock(self):
|
||||
"""
|
||||
@@ -558,8 +688,16 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
|
||||
"""
|
||||
lockdb = '/data/system/locksettings.db'
|
||||
sqlcommand = "update locksettings set value=\\'0\\' where name=\\'screenlock.disabled\\';"
|
||||
self.execute('sqlite3 {} "{}"'.format(lockdb, sqlcommand), as_root=True)
|
||||
sqlcommand = "update locksettings set value='0' where name='screenlock.disabled';"
|
||||
f = tempfile.NamedTemporaryFile()
|
||||
try:
|
||||
f.write('{} {} "{}"'.format(self.sqlite, lockdb, sqlcommand))
|
||||
f.flush()
|
||||
on_device_executable = self.install_executable(f.name,
|
||||
with_name="disable_screen_lock")
|
||||
finally:
|
||||
f.close()
|
||||
self.execute(on_device_executable, as_root=True)
|
||||
|
||||
def disable_selinux(self):
|
||||
# This may be invoked from intialize() so we can't use execute() or the
|
||||
@@ -573,17 +711,30 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
if se_status == 'Enforcing':
|
||||
self.execute('setenforce 0', as_root=True)
|
||||
|
||||
def get_device_model(self):
|
||||
try:
|
||||
return self.getprop(prop='ro.product.device')
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def broadcast_media_mounted(self, dirpath):
|
||||
"""
|
||||
Force a re-index of the mediaserver cache for the specified directory.
|
||||
"""
|
||||
command = 'am broadcast -a android.intent.action.MEDIA_MOUNTED -d file://'
|
||||
self.execute(command + dirpath)
|
||||
|
||||
# Internal methods: do not use outside of the class.
|
||||
|
||||
def _update_build_properties(self, filepath, props):
|
||||
def _update_build_properties(self, props):
|
||||
try:
|
||||
with open(filepath) as fh:
|
||||
for line in fh:
|
||||
line = re.sub(r'#.*', '', line).strip()
|
||||
if not line:
|
||||
continue
|
||||
key, value = line.split('=', 1)
|
||||
props[key] = value
|
||||
def strip(somestring):
|
||||
return somestring.strip().replace('[', '').replace(']', '')
|
||||
for line in self.execute("getprop").splitlines():
|
||||
key, value = line.split(':', 1)
|
||||
key = strip(key)
|
||||
value = strip(value)
|
||||
props[key] = value
|
||||
except ValueError:
|
||||
self.logger.warning('Could not parse build.prop.')
|
||||
|
||||
@@ -599,6 +750,18 @@ class AndroidDevice(BaseLinuxDevice): # pylint: disable=W0223
|
||||
else:
|
||||
self.logger.warning('Could not parse version string.')
|
||||
|
||||
def _ensure_binaries_directory_is_writable(self):
|
||||
matched = []
|
||||
for entry in self.list_file_systems():
|
||||
if self.binaries_directory.rstrip('/').startswith(entry.mount_point):
|
||||
matched.append(entry)
|
||||
if matched:
|
||||
entry = sorted(matched, key=lambda x: len(x.mount_point))[-1]
|
||||
if 'rw' not in entry.options:
|
||||
self.execute('mount -o rw,remount {} {}'.format(entry.device, entry.mount_point), as_root=True)
|
||||
else:
|
||||
raise DeviceError('Could not find mount point for binaries directory {}'.format(self.binaries_directory))
|
||||
|
||||
|
||||
class _LogcatPoller(threading.Thread):
|
||||
|
||||
@@ -675,4 +838,3 @@ class BigLittleDevice(AndroidDevice): # pylint: disable=W0223
|
||||
parameters = [
|
||||
Parameter('scheduler', default='hmp', override=True),
|
||||
]
|
||||
|
||||
|
@@ -34,3 +34,10 @@ class JarFile(FileResource):
|
||||
class ApkFile(FileResource):
|
||||
|
||||
name = 'apk'
|
||||
|
||||
def __init__(self, owner, platform=None):
|
||||
super(ApkFile, self).__init__(owner)
|
||||
self.platform = platform
|
||||
|
||||
def __str__(self):
|
||||
return '<{}\'s {} APK>'.format(self.owner, self.platform)
|
||||
|
502
wlauto/common/android/workload.py
Normal file → Executable file
502
wlauto/common/android/workload.py
Normal file → Executable file
@@ -16,19 +16,30 @@
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from math import ceil
|
||||
|
||||
from wlauto.core.extension import Parameter
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
from wlauto.core.extension import Parameter, ExtensionMeta, ListCollection
|
||||
from wlauto.core.workload import Workload
|
||||
from wlauto.core.resource import NO_ONE
|
||||
from wlauto.common.resources import ExtensionAsset, Executable
|
||||
from wlauto.exceptions import WorkloadError, ResourceError
|
||||
from wlauto.utils.android import ApkInfo
|
||||
from wlauto.common.android.resources import ApkFile, ReventFile
|
||||
from wlauto.common.resources import ExtensionAsset, Executable, File
|
||||
from wlauto.exceptions import WorkloadError, ResourceError, DeviceError
|
||||
from wlauto.utils.android import ApkInfo, ANDROID_NORMAL_PERMISSIONS, UNSUPPORTED_PACKAGES
|
||||
from wlauto.utils.types import boolean
|
||||
from wlauto.utils.revent import ReventRecording
|
||||
import wlauto.utils.statedetect as state_detector
|
||||
import wlauto.common.android.resources
|
||||
|
||||
|
||||
DELAY = 5
|
||||
|
||||
# Due to the way `super` works you have to call it at every level but WA executes some
|
||||
# methods conditionally and so has to call them directly via the class, this breaks super
|
||||
# and causes it to run things mutiple times ect. As a work around for this untill workloads
|
||||
# are reworked everything that subclasses workload calls parent methods explicitly
|
||||
|
||||
|
||||
class UiAutomatorWorkload(Workload):
|
||||
"""
|
||||
@@ -66,7 +77,7 @@ class UiAutomatorWorkload(Workload):
|
||||
|
||||
def __init__(self, device, _call_super=True, **kwargs): # pylint: disable=W0613
|
||||
if _call_super:
|
||||
super(UiAutomatorWorkload, self).__init__(device, **kwargs)
|
||||
Workload.__init__(self, device, **kwargs)
|
||||
self.uiauto_file = None
|
||||
self.device_uiauto_file = None
|
||||
self.command = None
|
||||
@@ -82,12 +93,13 @@ class UiAutomatorWorkload(Workload):
|
||||
self.uiauto_package = os.path.splitext(os.path.basename(self.uiauto_file))[0]
|
||||
|
||||
def setup(self, context):
|
||||
Workload.setup(self, context)
|
||||
method_string = '{}.{}#{}'.format(self.uiauto_package, self.uiauto_class, self.uiauto_method)
|
||||
params_dict = self.uiauto_params
|
||||
params_dict['workdir'] = self.device.working_directory
|
||||
params = ''
|
||||
for k, v in self.uiauto_params.iteritems():
|
||||
params += ' -e {} {}'.format(k, v)
|
||||
params += ' -e {} "{}"'.format(k, v)
|
||||
self.command = 'uiautomator runtest {}{} -c {}'.format(self.device_uiauto_file, params, method_string)
|
||||
self.device.push_file(self.uiauto_file, self.device_uiauto_file)
|
||||
self.device.killall('uiautomator')
|
||||
@@ -122,10 +134,16 @@ class ApkWorkload(Workload):
|
||||
:package: The package name of the app. This is usually a Java-style name of the form
|
||||
``com.companyname.appname``.
|
||||
:activity: This is the initial activity of the app. This will be used to launch the
|
||||
app during the setup.
|
||||
app during the setup. Many applications do not specify a launch activity so
|
||||
this may be left blank if necessary.
|
||||
:view: The class of the main view pane of the app. This needs to be defined in order
|
||||
to collect SurfaceFlinger-derived statistics (such as FPS) for the app, but
|
||||
may otherwise be left as ``None``.
|
||||
:launch_main: If ``False``, the default activity will not be launched (during setup),
|
||||
allowing workloads to start the app with an intent of their choice in
|
||||
the run step. This is useful for apps without a launchable default/main
|
||||
activity or those where it cannot be launched without intent data (which
|
||||
is provided at the run phase).
|
||||
:install_timeout: Timeout for the installation of the APK. This may vary wildly based on
|
||||
the size and nature of a specific APK, and so should be defined on
|
||||
per-workload basis.
|
||||
@@ -135,6 +153,9 @@ class ApkWorkload(Workload):
|
||||
so, as with all timeouts, so leeway must be included in
|
||||
the specified value.
|
||||
|
||||
:min_apk_version: The minimum supported apk version for this workload. May be ``None``.
|
||||
:max_apk_version: The maximum supported apk version for this workload. May be ``None``.
|
||||
|
||||
.. note:: Both package and activity for a workload may be obtained from the APK using
|
||||
the ``aapt`` tool that comes with the ADT (Android Developemnt Tools) bundle.
|
||||
|
||||
@@ -142,57 +163,223 @@ class ApkWorkload(Workload):
|
||||
package = None
|
||||
activity = None
|
||||
view = None
|
||||
install_timeout = None
|
||||
default_install_timeout = 300
|
||||
min_apk_version = None
|
||||
max_apk_version = None
|
||||
supported_platforms = ['android']
|
||||
launch_main = True
|
||||
|
||||
parameters = [
|
||||
Parameter('install_timeout', kind=int, default=300,
|
||||
description='Timeout for the installation of the apk.'),
|
||||
Parameter('check_apk', kind=boolean, default=True,
|
||||
description='''
|
||||
When set to True the APK file on the host will be prefered if
|
||||
it is a valid version and ABI, if not it will fall back to the
|
||||
version on the targer. When set to False the target version is
|
||||
prefered.
|
||||
'''),
|
||||
Parameter('force_install', kind=boolean, default=False,
|
||||
description='''
|
||||
Always re-install the APK, even if matching version is found already installed
|
||||
on the device. Runs ``adb install -r`` to ensure existing APK is replaced. When
|
||||
this is set, check_apk is ignored.
|
||||
'''),
|
||||
Parameter('uninstall_apk', kind=boolean, default=False,
|
||||
description="If ``True``, will uninstall workload's APK as part of teardown."),
|
||||
description='If ``True``, will uninstall workload\'s APK as part of teardown.'),
|
||||
Parameter('exact_abi', kind=bool, default=False,
|
||||
description='''
|
||||
If ``True``, workload will check that the APK matches the target
|
||||
device ABI, otherwise any APK found will be used.
|
||||
'''),
|
||||
]
|
||||
|
||||
def __init__(self, device, _call_super=True, **kwargs):
|
||||
if _call_super:
|
||||
super(ApkWorkload, self).__init__(device, **kwargs)
|
||||
Workload.__init__(self, device, **kwargs)
|
||||
self.apk_file = None
|
||||
self.apk_version = None
|
||||
self.logcat_log = None
|
||||
self.force_reinstall = kwargs.get('force_reinstall', False)
|
||||
if not self.install_timeout:
|
||||
self.install_timeout = self.default_install_timeout
|
||||
self.exact_apk_version = None
|
||||
self.exact_abi = kwargs.get('exact_abi')
|
||||
|
||||
def init_resources(self, context):
|
||||
self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(self), version=getattr(self, 'version', None))
|
||||
def setup(self, context): # pylint: disable=too-many-branches
|
||||
Workload.setup(self, context)
|
||||
|
||||
def setup(self, context):
|
||||
self.initialize_package(context)
|
||||
self.start_activity()
|
||||
# Get target version
|
||||
target_version = self.device.get_installed_package_version(self.package)
|
||||
if target_version:
|
||||
target_version = LooseVersion(target_version)
|
||||
self.logger.debug("Found version '{}' on target device".format(target_version))
|
||||
|
||||
# Get host version
|
||||
self.apk_file = context.resolver.get(ApkFile(self, self.device.abi),
|
||||
version=getattr(self, 'version', None),
|
||||
variant_name=getattr(self, 'variant_name', None),
|
||||
strict=False)
|
||||
|
||||
# Get target abi
|
||||
target_abi = self.device.get_installed_package_abi(self.package)
|
||||
if target_abi:
|
||||
self.logger.debug("Found apk with primary abi '{}' on target device".format(target_abi))
|
||||
|
||||
# Get host version, primary abi is first, and then try to find supported.
|
||||
for abi in self.device.supported_abi:
|
||||
self.apk_file = context.resolver.get(ApkFile(self, abi),
|
||||
version=getattr(self, 'version', None),
|
||||
variant_name=getattr(self, 'variant_name', None),
|
||||
strict=False)
|
||||
|
||||
# Stop if apk found, or if exact_abi is set only look for primary abi.
|
||||
if self.apk_file or self.exact_abi:
|
||||
break
|
||||
|
||||
host_version = None
|
||||
if self.apk_file is not None:
|
||||
host_version = ApkInfo(self.apk_file).version_name
|
||||
if host_version:
|
||||
host_version = LooseVersion(host_version)
|
||||
self.logger.debug("Found version '{}' on host".format(host_version))
|
||||
|
||||
# Error if apk was not found anywhere
|
||||
if target_version is None and host_version is None:
|
||||
msg = "Could not find APK for '{}' on the host or target device"
|
||||
raise ResourceError(msg.format(self.name))
|
||||
|
||||
if self.exact_apk_version is not None:
|
||||
if self.exact_apk_version != target_version and self.exact_apk_version != host_version:
|
||||
msg = "APK version '{}' not found on the host '{}' or target '{}'"
|
||||
raise ResourceError(msg.format(self.exact_apk_version, host_version, target_version))
|
||||
|
||||
# Error if exact_abi and suitable apk not found on host and incorrect version on device
|
||||
if self.exact_abi and host_version is None:
|
||||
if target_abi != self.device.abi:
|
||||
msg = "APK abi '{}' not found on the host and target is '{}'"
|
||||
raise ResourceError(msg.format(self.device.abi, target_abi))
|
||||
|
||||
# Ensure the apk is setup on the device
|
||||
if self.force_install:
|
||||
self.force_install_apk(context, host_version)
|
||||
elif self.check_apk:
|
||||
self.prefer_host_apk(context, host_version, target_version)
|
||||
else:
|
||||
self.prefer_target_apk(context, host_version, target_version)
|
||||
|
||||
self.reset(context)
|
||||
self.apk_version = self.device.get_installed_package_version(self.package)
|
||||
context.add_classifiers(apk_version=self.apk_version)
|
||||
|
||||
if self.launch_main:
|
||||
self.launch_package() # launch default activity without intent data
|
||||
self.device.execute('am kill-all') # kill all *background* activities
|
||||
self.device.clear_logcat()
|
||||
|
||||
def initialize_package(self, context):
|
||||
installed_version = self.device.get_installed_package_version(self.package)
|
||||
host_version = ApkInfo(self.apk_file).version_name
|
||||
if installed_version != host_version:
|
||||
if installed_version:
|
||||
message = '{} host version: {}, device version: {}; re-installing...'
|
||||
self.logger.debug(message.format(os.path.basename(self.apk_file), host_version, installed_version))
|
||||
else:
|
||||
message = '{} host version: {}, not found on device; installing...'
|
||||
self.logger.debug(message.format(os.path.basename(self.apk_file), host_version))
|
||||
self.force_reinstall = True
|
||||
else:
|
||||
message = '{} version {} found on both device and host.'
|
||||
self.logger.debug(message.format(os.path.basename(self.apk_file), host_version))
|
||||
if self.force_reinstall:
|
||||
if installed_version:
|
||||
self.device.uninstall(self.package)
|
||||
self.install_apk(context)
|
||||
else:
|
||||
self.reset(context)
|
||||
self.apk_version = host_version
|
||||
def force_install_apk(self, context, host_version):
|
||||
if host_version is None:
|
||||
raise ResourceError("force_install is 'True' but could not find APK on the host")
|
||||
try:
|
||||
self.validate_version(host_version)
|
||||
except ResourceError as e:
|
||||
msg = "force_install is 'True' but the host version is invalid:\n\t{}"
|
||||
raise ResourceError(msg.format(str(e)))
|
||||
self.install_apk(context, replace=True)
|
||||
|
||||
def start_activity(self):
|
||||
output = self.device.execute('am start -W -n {}/{}'.format(self.package, self.activity))
|
||||
def prefer_host_apk(self, context, host_version, target_version):
|
||||
msg = "check_apk is 'True' "
|
||||
if host_version is None:
|
||||
try:
|
||||
self.validate_version(target_version)
|
||||
except ResourceError as e:
|
||||
msg += "but the APK was not found on the host and the target version is invalid:\n\t{}"
|
||||
raise ResourceError(msg.format(str(e)))
|
||||
else:
|
||||
msg += "but the APK was not found on the host, using target version"
|
||||
self.logger.debug(msg)
|
||||
return
|
||||
try:
|
||||
self.validate_version(host_version)
|
||||
except ResourceError as e1:
|
||||
msg += "but the host APK version is invalid:\n\t{}\n"
|
||||
if target_version is None:
|
||||
msg += "The target does not have the app either"
|
||||
raise ResourceError(msg.format(str(e1)))
|
||||
try:
|
||||
self.validate_version(target_version)
|
||||
except ResourceError as e2:
|
||||
msg += "The target version is also invalid:\n\t{}"
|
||||
raise ResourceError(msg.format(str(e1), str(e2)))
|
||||
else:
|
||||
msg += "using the target version instead"
|
||||
self.logger.debug(msg.format(str(e1)))
|
||||
else: # Host version is valid
|
||||
if target_version is not None and target_version == host_version:
|
||||
msg += " and a matching version is alread on the device, doing nothing"
|
||||
self.logger.debug(msg)
|
||||
return
|
||||
msg += " and the host version is not on the target, installing APK"
|
||||
self.logger.debug(msg)
|
||||
self.install_apk(context, replace=True)
|
||||
|
||||
def prefer_target_apk(self, context, host_version, target_version):
|
||||
msg = "check_apk is 'False' "
|
||||
if target_version is None:
|
||||
try:
|
||||
self.validate_version(host_version)
|
||||
except ResourceError as e:
|
||||
msg += "but the app was not found on the target and the host version is invalid:\n\t{}"
|
||||
raise ResourceError(msg.format(str(e)))
|
||||
else:
|
||||
msg += "and the app was not found on the target, using host version"
|
||||
self.logger.debug(msg)
|
||||
self.install_apk(context)
|
||||
return
|
||||
try:
|
||||
self.validate_version(target_version)
|
||||
except ResourceError as e1:
|
||||
msg += "but the target app version is invalid:\n\t{}\n"
|
||||
if host_version is None:
|
||||
msg += "The host does not have the APK either"
|
||||
raise ResourceError(msg.format(str(e1)))
|
||||
try:
|
||||
self.validate_version(host_version)
|
||||
except ResourceError as e2:
|
||||
msg += "The host version is also invalid:\n\t{}"
|
||||
raise ResourceError(msg.format(str(e1), str(e2)))
|
||||
else:
|
||||
msg += "Using the host APK instead"
|
||||
self.logger.debug(msg.format(str(e1)))
|
||||
self.install_apk(context, replace=True)
|
||||
else:
|
||||
msg += "and a valid version of the app is already on the target, using target app"
|
||||
self.logger.debug(msg)
|
||||
|
||||
def validate_version(self, version):
|
||||
min_apk_version = getattr(self, 'min_apk_version', None)
|
||||
max_apk_version = getattr(self, 'max_apk_version', None)
|
||||
|
||||
if min_apk_version is not None and max_apk_version is not None:
|
||||
if version < LooseVersion(min_apk_version) or \
|
||||
version > LooseVersion(max_apk_version):
|
||||
msg = "version '{}' not supported. " \
|
||||
"Minimum version required: '{}', Maximum version known to work: '{}'"
|
||||
raise ResourceError(msg.format(version, min_apk_version, max_apk_version))
|
||||
|
||||
elif min_apk_version is not None:
|
||||
if version < LooseVersion(min_apk_version):
|
||||
msg = "version '{}' not supported. " \
|
||||
"Minimum version required: '{}'"
|
||||
raise ResourceError(msg.format(version, min_apk_version))
|
||||
|
||||
elif max_apk_version is not None:
|
||||
if version > LooseVersion(max_apk_version):
|
||||
msg = "version '{}' not supported. " \
|
||||
"Maximum version known to work: '{}'"
|
||||
raise ResourceError(msg.format(version, max_apk_version))
|
||||
|
||||
def launch_package(self):
|
||||
if not self.activity:
|
||||
output = self.device.execute('am start -W {}'.format(self.package))
|
||||
else:
|
||||
output = self.device.execute('am start -W -n {}/{}'.format(self.package, self.activity))
|
||||
if 'Error:' in output:
|
||||
self.device.execute('am force-stop {}'.format(self.package)) # this will dismiss any erro dialogs
|
||||
raise WorkloadError(output)
|
||||
@@ -202,19 +389,62 @@ class ApkWorkload(Workload):
|
||||
self.device.execute('am force-stop {}'.format(self.package))
|
||||
self.device.execute('pm clear {}'.format(self.package))
|
||||
|
||||
def install_apk(self, context):
|
||||
output = self.device.install(self.apk_file, self.install_timeout)
|
||||
# As of android API level 23, apps can request permissions at runtime,
|
||||
# this will grant all of them so requests do not pop up when running the app
|
||||
# This can also be done less "manually" during adb install using the -g flag
|
||||
if self.device.get_sdk_version() >= 23:
|
||||
self._grant_requested_permissions()
|
||||
|
||||
def install_apk(self, context, replace=False):
|
||||
success = False
|
||||
if replace:
|
||||
self.device.uninstall(self.package)
|
||||
output = self.device.install_apk(self.apk_file, timeout=self.install_timeout,
|
||||
replace=replace, allow_downgrade=True)
|
||||
if 'Failure' in output:
|
||||
if 'ALREADY_EXISTS' in output:
|
||||
self.logger.warn('Using already installed APK (did not unistall properly?)')
|
||||
self.reset(context)
|
||||
else:
|
||||
raise WorkloadError(output)
|
||||
else:
|
||||
self.logger.debug(output)
|
||||
success = True
|
||||
self.do_post_install(context)
|
||||
return success
|
||||
|
||||
def _grant_requested_permissions(self):
|
||||
dumpsys_output = self.device.execute(command="dumpsys package {}".format(self.package))
|
||||
permissions = []
|
||||
lines = iter(dumpsys_output.splitlines())
|
||||
for line in lines:
|
||||
if "requested permissions:" in line:
|
||||
break
|
||||
|
||||
for line in lines:
|
||||
if "android.permission." in line:
|
||||
permissions.append(line.split(":")[0].strip())
|
||||
# Matching either of these means the end of requested permissions section
|
||||
elif "install permissions:" in line or "runtime permissions:" in line:
|
||||
break
|
||||
|
||||
for permission in set(permissions):
|
||||
# "Normal" Permisions are automatically granted and cannot be changed
|
||||
permission_name = permission.rsplit('.', 1)[1]
|
||||
if permission_name not in ANDROID_NORMAL_PERMISSIONS:
|
||||
# On some API 23+ devices, this may fail with a SecurityException
|
||||
# on previously granted permissions. In that case, just skip as it
|
||||
# is not fatal to the workload execution
|
||||
try:
|
||||
self.device.execute("pm grant {} {}".format(self.package, permission))
|
||||
except DeviceError as e:
|
||||
if "changeable permission" in e.message or "Unknown permission" in e.message:
|
||||
self.logger.debug(e)
|
||||
else:
|
||||
raise e
|
||||
|
||||
def do_post_install(self, context):
|
||||
""" May be overwritten by dervied classes."""
|
||||
""" May be overwritten by derived classes."""
|
||||
pass
|
||||
|
||||
def run(self, context):
|
||||
@@ -233,37 +463,44 @@ class ApkWorkload(Workload):
|
||||
if self.uninstall_apk:
|
||||
self.device.uninstall(self.package)
|
||||
|
||||
def validate(self):
|
||||
if not self.apk_file:
|
||||
raise WorkloadError('No APK file found for workload {}.'.format(self.name))
|
||||
|
||||
|
||||
AndroidBenchmark = ApkWorkload # backward compatibility
|
||||
|
||||
|
||||
class ReventWorkload(Workload):
|
||||
|
||||
default_setup_timeout = 5 * 60 # in seconds
|
||||
default_run_timeout = 10 * 60 # in seconds
|
||||
# pylint: disable=attribute-defined-outside-init
|
||||
|
||||
def __init__(self, device, _call_super=True, **kwargs):
|
||||
if _call_super:
|
||||
super(ReventWorkload, self).__init__(device, **kwargs)
|
||||
Workload.__init__(self, device, **kwargs)
|
||||
devpath = self.device.path
|
||||
self.on_device_revent_binary = devpath.join(self.device.working_directory, 'revent')
|
||||
self.on_device_setup_revent = devpath.join(self.device.working_directory, '{}.setup.revent'.format(self.device.name))
|
||||
self.on_device_run_revent = devpath.join(self.device.working_directory, '{}.run.revent'.format(self.device.name))
|
||||
self.setup_timeout = kwargs.get('setup_timeout', self.default_setup_timeout)
|
||||
self.run_timeout = kwargs.get('run_timeout', self.default_run_timeout)
|
||||
self.on_device_revent_binary = devpath.join(self.device.binaries_directory, 'revent')
|
||||
self.setup_timeout = kwargs.get('setup_timeout', None)
|
||||
self.run_timeout = kwargs.get('run_timeout', None)
|
||||
self.revent_setup_file = None
|
||||
self.revent_run_file = None
|
||||
self.on_device_setup_revent = None
|
||||
self.on_device_run_revent = None
|
||||
self.statedefs_dir = None
|
||||
|
||||
def init_resources(self, context):
|
||||
self.revent_setup_file = context.resolver.get(wlauto.common.android.resources.ReventFile(self, 'setup'))
|
||||
self.revent_run_file = context.resolver.get(wlauto.common.android.resources.ReventFile(self, 'run'))
|
||||
if self.check_states:
|
||||
state_detector.check_match_state_dependencies()
|
||||
|
||||
def setup(self, context):
|
||||
self.revent_setup_file = context.resolver.get(ReventFile(self, 'setup'))
|
||||
self.revent_run_file = context.resolver.get(ReventFile(self, 'run'))
|
||||
devpath = self.device.path
|
||||
self.on_device_setup_revent = devpath.join(self.device.working_directory,
|
||||
os.path.split(self.revent_setup_file)[-1])
|
||||
self.on_device_run_revent = devpath.join(self.device.working_directory,
|
||||
os.path.split(self.revent_run_file)[-1])
|
||||
self._check_revent_files(context)
|
||||
default_setup_timeout = ceil(ReventRecording(self.revent_setup_file).duration) + 30
|
||||
default_run_timeout = ceil(ReventRecording(self.revent_run_file).duration) + 30
|
||||
self.setup_timeout = self.setup_timeout or default_setup_timeout
|
||||
self.run_timeout = self.run_timeout or default_run_timeout
|
||||
|
||||
Workload.setup(self, context)
|
||||
self.device.killall('revent')
|
||||
command = '{} replay {}'.format(self.on_device_revent_binary, self.on_device_setup_revent)
|
||||
self.device.execute(command, timeout=self.setup_timeout)
|
||||
@@ -278,6 +515,7 @@ class ReventWorkload(Workload):
|
||||
pass
|
||||
|
||||
def teardown(self, context):
|
||||
self.device.killall('revent')
|
||||
self.device.delete_file(self.on_device_setup_revent)
|
||||
self.device.delete_file(self.on_device_run_revent)
|
||||
|
||||
@@ -301,13 +539,39 @@ class ReventWorkload(Workload):
|
||||
self.device.push_file(self.revent_run_file, self.on_device_run_revent)
|
||||
self.device.push_file(self.revent_setup_file, self.on_device_setup_revent)
|
||||
|
||||
def _check_statedetection_files(self, context):
|
||||
try:
|
||||
self.statedefs_dir = context.resolver.get(File(self, 'state_definitions'))
|
||||
except ResourceError:
|
||||
self.logger.warning("State definitions directory not found. Disabling state detection.")
|
||||
self.check_states = False
|
||||
|
||||
def check_state(self, context, phase):
|
||||
try:
|
||||
self.logger.info("\tChecking workload state...")
|
||||
screenshotPath = os.path.join(context.output_directory, "screen.png")
|
||||
self.device.capture_screen(screenshotPath)
|
||||
stateCheck = state_detector.verify_state(screenshotPath, self.statedefs_dir, phase)
|
||||
if not stateCheck:
|
||||
raise WorkloadError("Unexpected state after setup")
|
||||
except state_detector.StateDefinitionError as e:
|
||||
msg = "State definitions or template files missing or invalid ({}). Skipping state detection."
|
||||
self.logger.warning(msg.format(e.message))
|
||||
|
||||
|
||||
class AndroidUiAutoBenchmark(UiAutomatorWorkload, AndroidBenchmark):
|
||||
|
||||
supported_platforms = ['android']
|
||||
|
||||
def __init__(self, device, **kwargs):
|
||||
UiAutomatorWorkload.__init__(self, device, **kwargs)
|
||||
AndroidBenchmark.__init__(self, device, _call_super=False, **kwargs)
|
||||
|
||||
def initialize(self, context):
|
||||
UiAutomatorWorkload.initialize(self, context)
|
||||
AndroidBenchmark.initialize(self, context)
|
||||
self._check_unsupported_packages()
|
||||
|
||||
def init_resources(self, context):
|
||||
UiAutomatorWorkload.init_resources(self, context)
|
||||
AndroidBenchmark.init_resources(self, context)
|
||||
@@ -324,6 +588,88 @@ class AndroidUiAutoBenchmark(UiAutomatorWorkload, AndroidBenchmark):
|
||||
UiAutomatorWorkload.teardown(self, context)
|
||||
AndroidBenchmark.teardown(self, context)
|
||||
|
||||
def _check_unsupported_packages(self):
|
||||
"""
|
||||
Check for any unsupported package versions and raise an
|
||||
exception if detected.
|
||||
|
||||
"""
|
||||
for package in UNSUPPORTED_PACKAGES:
|
||||
version = self.device.get_installed_package_version(package)
|
||||
if version is None:
|
||||
continue
|
||||
|
||||
if '-' in version:
|
||||
version = version.split('-')[0] # ignore abi version
|
||||
|
||||
if version in UNSUPPORTED_PACKAGES[package]:
|
||||
message = 'This workload does not support version "{}" of package "{}"'
|
||||
raise WorkloadError(message.format(version, package))
|
||||
|
||||
|
||||
class AndroidUxPerfWorkloadMeta(ExtensionMeta):
|
||||
to_propagate = ExtensionMeta.to_propagate + [('deployable_assets', str, ListCollection)]
|
||||
|
||||
|
||||
class AndroidUxPerfWorkload(AndroidUiAutoBenchmark):
|
||||
__metaclass__ = AndroidUxPerfWorkloadMeta
|
||||
|
||||
deployable_assets = []
|
||||
parameters = [
|
||||
Parameter('markers_enabled', kind=bool, default=False,
|
||||
description="""
|
||||
If ``True``, UX_PERF action markers will be emitted to logcat during
|
||||
the test run.
|
||||
"""),
|
||||
Parameter('clean_assets', kind=bool, default=False,
|
||||
description="""
|
||||
If ``True`` pushed assets will be deleted at the end of each iteration
|
||||
"""),
|
||||
Parameter('force_push_assets', kind=bool, default=False,
|
||||
description="""
|
||||
If ``True`` always push assets on each iteration, even if the
|
||||
assets already exists in the device path
|
||||
"""),
|
||||
]
|
||||
|
||||
def _path_on_device(self, fpath, dirname=None):
|
||||
if dirname is None:
|
||||
dirname = self.device.working_directory
|
||||
fname = os.path.basename(fpath)
|
||||
return self.device.path.join(dirname, fname)
|
||||
|
||||
def push_assets(self, context):
|
||||
for f in self.deployable_assets:
|
||||
fpath = context.resolver.get(File(self, f))
|
||||
device_path = self._path_on_device(fpath)
|
||||
if self.force_push_assets or not self.device.file_exists(device_path):
|
||||
self.device.push_file(fpath, device_path, timeout=300)
|
||||
self.device.broadcast_media_mounted(self.device.working_directory)
|
||||
|
||||
def delete_assets(self):
|
||||
for f in self.deployable_assets:
|
||||
self.device.delete_file(self._path_on_device(f))
|
||||
self.device.broadcast_media_mounted(self.device.working_directory)
|
||||
|
||||
def __init__(self, device, **kwargs):
|
||||
super(AndroidUxPerfWorkload, self).__init__(device, **kwargs)
|
||||
# Turn class attribute into instance attribute
|
||||
self.deployable_assets = list(self.deployable_assets)
|
||||
|
||||
def validate(self):
|
||||
super(AndroidUxPerfWorkload, self).validate()
|
||||
self.uiauto_params['package'] = self.package
|
||||
self.uiauto_params['markers_enabled'] = self.markers_enabled
|
||||
|
||||
def setup(self, context):
|
||||
super(AndroidUxPerfWorkload, self).setup(context)
|
||||
self.push_assets(context)
|
||||
|
||||
def teardown(self, context):
|
||||
super(AndroidUxPerfWorkload, self).teardown(context)
|
||||
if self.clean_assets:
|
||||
self.delete_assets()
|
||||
|
||||
|
||||
class GameWorkload(ApkWorkload, ReventWorkload):
|
||||
"""
|
||||
@@ -355,8 +701,22 @@ class GameWorkload(ApkWorkload, ReventWorkload):
|
||||
asset_file = None
|
||||
saved_state_file = None
|
||||
view = 'SurfaceView'
|
||||
install_timeout = 500
|
||||
loading_time = 10
|
||||
supported_platforms = ['android']
|
||||
|
||||
parameters = [
|
||||
Parameter('install_timeout', default=500, override=True),
|
||||
Parameter('check_states', kind=bool, default=False, global_alias='check_game_states',
|
||||
description="""Use visual state detection to verify the state of the workload
|
||||
after setup and run"""),
|
||||
Parameter('assets_push_timeout', kind=int, default=500,
|
||||
description='Timeout used during deployment of the assets package (if there is one).'),
|
||||
Parameter('clear_data_on_reset', kind=bool, default=True,
|
||||
description="""
|
||||
If set to ``False``, this will prevent WA from clearing package
|
||||
data for this workload prior to running it.
|
||||
"""),
|
||||
]
|
||||
|
||||
def __init__(self, device, **kwargs): # pylint: disable=W0613
|
||||
ApkWorkload.__init__(self, device, **kwargs)
|
||||
@@ -368,6 +728,8 @@ class GameWorkload(ApkWorkload, ReventWorkload):
|
||||
def init_resources(self, context):
|
||||
ApkWorkload.init_resources(self, context)
|
||||
ReventWorkload.init_resources(self, context)
|
||||
if self.check_states:
|
||||
self._check_statedetection_files(context)
|
||||
|
||||
def setup(self, context):
|
||||
ApkWorkload.setup(self, context)
|
||||
@@ -375,23 +737,33 @@ class GameWorkload(ApkWorkload, ReventWorkload):
|
||||
time.sleep(self.loading_time)
|
||||
ReventWorkload.setup(self, context)
|
||||
|
||||
# state detection check if it's enabled in the config
|
||||
if self.check_states:
|
||||
self.check_state(context, "setup_complete")
|
||||
|
||||
def do_post_install(self, context):
|
||||
ApkWorkload.do_post_install(self, context)
|
||||
self._deploy_assets(context)
|
||||
self._deploy_assets(context, self.assets_push_timeout)
|
||||
|
||||
def reset(self, context):
|
||||
# If saved state exists, restore it; if not, do full
|
||||
# uninstall/install cycle.
|
||||
self.device.execute('am force-stop {}'.format(self.package))
|
||||
if self.saved_state_file:
|
||||
self._deploy_resource_tarball(context, self.saved_state_file)
|
||||
else:
|
||||
ApkWorkload.reset(self, context)
|
||||
if self.clear_data_on_reset:
|
||||
self.device.execute('pm clear {}'.format(self.package))
|
||||
self._deploy_assets(context)
|
||||
|
||||
def run(self, context):
|
||||
ReventWorkload.run(self, context)
|
||||
|
||||
def teardown(self, context):
|
||||
# state detection check if it's enabled in the config
|
||||
if self.check_states:
|
||||
self.check_state(context, "run_complete")
|
||||
|
||||
if not self.saved_state_file:
|
||||
ApkWorkload.teardown(self, context)
|
||||
else:
|
||||
@@ -416,7 +788,7 @@ class GameWorkload(ApkWorkload, ReventWorkload):
|
||||
raise WorkloadError(message.format(resource_file, self.name))
|
||||
# adb push will create intermediate directories if they don't
|
||||
# exist.
|
||||
self.device.push_file(asset_tarball, ondevice_cache)
|
||||
self.device.push_file(asset_tarball, ondevice_cache, timeout=timeout)
|
||||
|
||||
device_asset_directory = self.device.path.join(self.device.external_storage_directory, 'Android', kind)
|
||||
deploy_command = 'cd {} && {} tar -xzf {}'.format(device_asset_directory,
|
||||
|
Binary file not shown.
BIN
wlauto/common/bin/arm64/m5
Executable file
BIN
wlauto/common/bin/arm64/m5
Executable file
Binary file not shown.
Binary file not shown.
BIN
wlauto/common/bin/arm64/sqlite3
Normal file
BIN
wlauto/common/bin/arm64/sqlite3
Normal file
Binary file not shown.
Binary file not shown.
BIN
wlauto/common/bin/armeabi/m5
Executable file
BIN
wlauto/common/bin/armeabi/m5
Executable file
Binary file not shown.
Binary file not shown.
BIN
wlauto/common/bin/armeabi/sqlite3
Normal file
BIN
wlauto/common/bin/armeabi/sqlite3
Normal file
Binary file not shown.
6
wlauto/common/gem5/LICENSE
Normal file
6
wlauto/common/gem5/LICENSE
Normal file
@@ -0,0 +1,6 @@
|
||||
The gem5 simulator can be obtained from http://repo.gem5.org/gem5/ and the
|
||||
corresponding documentation can be found at http://www.gem5.org.
|
||||
|
||||
The source for the m5 binaries bundled with Workload Automation (found at
|
||||
wlauto/common/bin/arm64/m5 and wlauto/common/bin/armeabi/m5) can be found at
|
||||
util/m5 in the gem5 source at http://repo.gem5.org/gem5/.
|
14
wlauto/common/gem5/__init__.py
Normal file
14
wlauto/common/gem5/__init__.py
Normal file
@@ -0,0 +1,14 @@
|
||||
# Copyright 2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
684
wlauto/common/gem5/device.py
Normal file
684
wlauto/common/gem5/device.py
Normal file
@@ -0,0 +1,684 @@
|
||||
# Copyright 2014-2015 ARM Limited
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
# Original implementation by Rene de Jong. Updated by Sascha Bischoff.
|
||||
|
||||
# pylint: disable=E1101
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
import time
|
||||
from pexpect import EOF, TIMEOUT, pxssh
|
||||
|
||||
from wlauto import settings, Parameter
|
||||
from wlauto.core.resource import NO_ONE
|
||||
from wlauto.common.resources import Executable
|
||||
from wlauto.core import signal as sig
|
||||
from wlauto.exceptions import DeviceError
|
||||
from wlauto.utils import ssh, types
|
||||
|
||||
|
||||
class BaseGem5Device(object):
|
||||
"""
|
||||
Base implementation for a gem5-based device
|
||||
|
||||
This class is used as the base class for OS-specific devices such as the
|
||||
G3m5LinuxDevice and the Gem5AndroidDevice. The majority of the gem5-specific
|
||||
functionality is included here.
|
||||
|
||||
Note: When inheriting from this class, make sure to inherit from this class
|
||||
prior to inheriting from the OS-specific class, i.e. LinuxDevice, to ensure
|
||||
that the methods are correctly overridden.
|
||||
"""
|
||||
# gem5 can be very slow. Hence, we use some very long timeouts!
|
||||
delay = 3600
|
||||
long_delay = 3 * delay
|
||||
ready_timeout = long_delay
|
||||
default_timeout = delay
|
||||
|
||||
platform = None
|
||||
path_module = 'posixpath'
|
||||
|
||||
parameters = [
|
||||
Parameter('gem5_binary', kind=str, default='./build/ARM/gem5.fast',
|
||||
mandatory=False, description="Command used to execute gem5. "
|
||||
"Adjust according to needs."),
|
||||
Parameter('gem5_args', kind=types.arguments, mandatory=True,
|
||||
description="Command line passed to the gem5 simulation. This"
|
||||
" command line is used to set up the simulated system, and "
|
||||
"should be the same as used for a standard gem5 simulation "
|
||||
"without workload automation. Note that this is simulation "
|
||||
"script specific and will hence need to be tailored to each "
|
||||
"particular use case."),
|
||||
Parameter('gem5_vio_args', kind=types.arguments, mandatory=True,
|
||||
constraint=lambda x: "{}" in str(x),
|
||||
description="gem5 VirtIO command line used to enable the "
|
||||
"VirtIO device in the simulated system. At the very least, "
|
||||
"the root parameter of the VirtIO9PDiod device must be "
|
||||
"exposed on the command line. Please set this root mount to "
|
||||
"{}, as it will be replaced with the directory used by "
|
||||
"Workload Automation at runtime."),
|
||||
Parameter('temp_dir', kind=str, default='/tmp',
|
||||
description="Temporary directory used to pass files into the "
|
||||
"gem5 simulation. Workload Automation will automatically "
|
||||
"create a directory in this folder, and will remove it again "
|
||||
"once the simulation completes."),
|
||||
Parameter('checkpoint', kind=bool, default=False,
|
||||
mandatory=False, description="This parameter "
|
||||
"tells Workload Automation to create a checkpoint of the "
|
||||
"simulated system once the guest system has finished booting."
|
||||
" This checkpoint can then be used at a later stage by other "
|
||||
"WA runs to avoid booting the guest system a second time. Set"
|
||||
" to True to take a checkpoint of the simulated system post "
|
||||
"boot."),
|
||||
Parameter('run_delay', kind=int, default=0, mandatory=False,
|
||||
constraint=lambda x: x >= 0,
|
||||
description="This sets the time that the "
|
||||
"system should sleep in the simulated system prior to "
|
||||
"running and workloads or taking checkpoints. This allows "
|
||||
"the system to quieten down prior to running the workloads. "
|
||||
"When this is combined with the checkpoint_post_boot"
|
||||
" option, it allows the checkpoint to be created post-sleep,"
|
||||
" and therefore the set of workloads resuming from this "
|
||||
"checkpoint will not be required to sleep.")
|
||||
]
|
||||
|
||||
@property
|
||||
def is_rooted(self): # pylint: disable=R0201
|
||||
# gem5 is always rooted
|
||||
return True
|
||||
|
||||
# pylint: disable=E0203
|
||||
def __init__(self):
|
||||
self.logger = logging.getLogger('gem5Device')
|
||||
|
||||
# The gem5 subprocess
|
||||
self.gem5 = None
|
||||
self.gem5_port = -1
|
||||
self.gem5outdir = os.path.join(settings.output_directory, "gem5")
|
||||
self.m5_path = 'm5'
|
||||
self.stdout_file = None
|
||||
self.stderr_file = None
|
||||
self.stderr_filename = None
|
||||
self.sckt = None
|
||||
|
||||
# Find the first one that does not exist. Ensures that we do not re-use
|
||||
# the directory used by someone else.
|
||||
for i in xrange(sys.maxint):
|
||||
directory = os.path.join(self.temp_dir, "wa_{}".format(i))
|
||||
try:
|
||||
os.stat(directory)
|
||||
continue
|
||||
except OSError:
|
||||
break
|
||||
self.temp_dir = directory
|
||||
self.logger.debug("Using {} as the temporary directory.".format(self.temp_dir))
|
||||
|
||||
# Start the gem5 simulation when WA starts a run using a signal.
|
||||
sig.connect(self.init_gem5, sig.RUN_START)
|
||||
|
||||
def validate(self):
|
||||
# Assemble the virtio args
|
||||
self.gem5_vio_args = str(self.gem5_vio_args).format(self.temp_dir) # pylint: disable=W0201
|
||||
self.logger.debug("gem5 VirtIO command: {}".format(self.gem5_vio_args))
|
||||
|
||||
def init_gem5(self, _):
|
||||
"""
|
||||
Start gem5, find out the telnet port and connect to the simulation.
|
||||
|
||||
We first create the temporary directory used by VirtIO to pass files
|
||||
into the simulation, as well as the gem5 output directory.We then create
|
||||
files for the standard output and error for the gem5 process. The gem5
|
||||
process then is started.
|
||||
"""
|
||||
self.logger.info("Creating temporary directory: {}".format(self.temp_dir))
|
||||
os.mkdir(self.temp_dir)
|
||||
os.mkdir(self.gem5outdir)
|
||||
|
||||
# We need to redirect the standard output and standard error for the
|
||||
# gem5 process to a file so that we can debug when things go wrong.
|
||||
f = os.path.join(self.gem5outdir, 'stdout')
|
||||
self.stdout_file = open(f, 'w')
|
||||
f = os.path.join(self.gem5outdir, 'stderr')
|
||||
self.stderr_file = open(f, 'w')
|
||||
# We need to keep this so we can check which port to use for the telnet
|
||||
# connection.
|
||||
self.stderr_filename = f
|
||||
|
||||
self.start_gem5()
|
||||
|
||||
def start_gem5(self):
|
||||
"""
|
||||
Starts the gem5 simulator, and parses the output to get the telnet port.
|
||||
"""
|
||||
self.logger.info("Starting the gem5 simulator")
|
||||
|
||||
command_line = "{} --outdir={}/gem5 {} {}".format(self.gem5_binary,
|
||||
settings.output_directory,
|
||||
self.gem5_args,
|
||||
self.gem5_vio_args)
|
||||
self.logger.debug("gem5 command line: {}".format(command_line))
|
||||
self.gem5 = subprocess.Popen(command_line.split(),
|
||||
stdout=self.stdout_file,
|
||||
stderr=self.stderr_file)
|
||||
|
||||
while self.gem5_port == -1:
|
||||
# Check that gem5 is running!
|
||||
if self.gem5.poll():
|
||||
raise DeviceError("The gem5 process has crashed with error code {}!".format(self.gem5.poll()))
|
||||
|
||||
# Open the stderr file
|
||||
f = open(self.stderr_filename, 'r')
|
||||
for line in f:
|
||||
m = re.search(r"Listening\ for\ system\ connection\ on\ port\ (?P<port>\d+)", line)
|
||||
if m:
|
||||
port = int(m.group('port'))
|
||||
if port >= 3456 and port < 5900:
|
||||
self.gem5_port = port
|
||||
f.close()
|
||||
break
|
||||
else:
|
||||
time.sleep(1)
|
||||
f.close()
|
||||
|
||||
def connect(self): # pylint: disable=R0912,W0201
|
||||
"""
|
||||
Connect to the gem5 simulation and wait for Android to boot. Then,
|
||||
create checkpoints, and mount the VirtIO device.
|
||||
"""
|
||||
self.connect_gem5()
|
||||
|
||||
self.wait_for_boot()
|
||||
|
||||
if self.run_delay:
|
||||
self.logger.info("Sleeping for {} seconds in the guest".format(self.run_delay))
|
||||
self.gem5_shell("sleep {}".format(self.run_delay))
|
||||
|
||||
if self.checkpoint:
|
||||
self.checkpoint_gem5()
|
||||
|
||||
self.mount_virtio()
|
||||
self.logger.info("Creating the working directory in the simulated system")
|
||||
self.gem5_shell('mkdir -p {}'.format(self.working_directory))
|
||||
self._is_ready = True # pylint: disable=W0201
|
||||
|
||||
def wait_for_boot(self):
|
||||
pass
|
||||
|
||||
def connect_gem5(self): # pylint: disable=R0912
|
||||
"""
|
||||
Connect to the telnet port of the gem5 simulation.
|
||||
|
||||
We connect, and wait for the prompt to be found. We do not use a timeout
|
||||
for this, and wait for the prompt in a while loop as the gem5 simulation
|
||||
can take many hours to reach a prompt when booting the system. We also
|
||||
inject some newlines periodically to try and force gem5 to show a
|
||||
prompt. Once the prompt has been found, we replace it with a unique
|
||||
prompt to ensure that we are able to match it properly. We also disable
|
||||
the echo as this simplifies parsing the output when executing commands
|
||||
on the device.
|
||||
"""
|
||||
self.logger.info("Connecting to the gem5 simulation on port {}".format(self.gem5_port))
|
||||
host = socket.gethostname()
|
||||
port = self.gem5_port
|
||||
|
||||
# Connect to the gem5 telnet port. Use a short timeout here.
|
||||
attempts = 0
|
||||
while attempts < 10:
|
||||
attempts += 1
|
||||
try:
|
||||
self.sckt = ssh.TelnetConnection()
|
||||
self.sckt.login(host, 'None', port=port, auto_prompt_reset=False,
|
||||
login_timeout=10)
|
||||
break
|
||||
except pxssh.ExceptionPxssh:
|
||||
pass
|
||||
else:
|
||||
self.gem5.kill()
|
||||
raise DeviceError("Failed to connect to the gem5 telnet session.")
|
||||
|
||||
self.logger.info("Connected! Waiting for prompt...")
|
||||
|
||||
# We need to find the prompt. It might be different if we are resuming
|
||||
# from a checkpoint. Therefore, we test multiple options here.
|
||||
prompt_found = False
|
||||
while not prompt_found:
|
||||
try:
|
||||
self.login_to_device()
|
||||
except TIMEOUT:
|
||||
pass
|
||||
try:
|
||||
# Try and force a prompt to be shown
|
||||
self.sckt.send('\n')
|
||||
self.sckt.expect([r'# ', self.sckt.UNIQUE_PROMPT, r'\[PEXPECT\][\\\$\#]+ '], timeout=60)
|
||||
prompt_found = True
|
||||
except TIMEOUT:
|
||||
pass
|
||||
|
||||
self.logger.info("Setting unique prompt...")
|
||||
|
||||
self.sckt.set_unique_prompt()
|
||||
self.sckt.prompt()
|
||||
self.logger.info("Prompt found and replaced with a unique string")
|
||||
|
||||
# We check that the prompt is what we think it should be. If not, we
|
||||
# need to update the regex we use to match.
|
||||
self.find_prompt()
|
||||
|
||||
self.sckt.setecho(False)
|
||||
self.sync_gem5_shell()
|
||||
self.resize_shell()
|
||||
|
||||
def get_properties(self, context): # pylint: disable=R0801
|
||||
""" Get the property files from the device """
|
||||
for propfile in self.property_files:
|
||||
try:
|
||||
normname = propfile.lstrip(self.path.sep).replace(self.path.sep, '.')
|
||||
outfile = os.path.join(context.host_working_directory, normname)
|
||||
if self.is_file(propfile):
|
||||
self.execute('cat {} > {}'.format(propfile, normname))
|
||||
self.pull_file(normname, outfile)
|
||||
elif self.is_directory(propfile):
|
||||
self.get_directory(context, propfile)
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
except DeviceError:
|
||||
# We pull these files "opportunistically", so if a pull fails
|
||||
# (e.g. we don't have permissions to read the file), just note
|
||||
# it quietly (not as an error/warning) and move on.
|
||||
self.logger.debug('Could not pull property file "{}"'.format(propfile))
|
||||
return {}
|
||||
|
||||
def get_directory(self, context, directory):
|
||||
""" Pull a directory from the device """
|
||||
normname = directory.lstrip(self.path.sep).replace(self.path.sep, '.')
|
||||
outdir = os.path.join(context.host_working_directory, normname)
|
||||
temp_file = os.path.join(context.host_working_directory, "{}.tar".format(normname))
|
||||
# Check that the folder exists
|
||||
self.gem5_shell("ls -la {}".format(directory))
|
||||
# Compress the folder
|
||||
try:
|
||||
self.gem5_shell("{} tar -cvf {}.tar {}".format(self.busybox, normname, directory))
|
||||
except DeviceError:
|
||||
self.logger.debug("Failed to run tar command on device! Not pulling {}".format(directory))
|
||||
return
|
||||
self.pull_file(normname, temp_file)
|
||||
f = tarfile.open(temp_file, 'r')
|
||||
os.mkdir(outdir)
|
||||
f.extractall(outdir)
|
||||
os.remove(temp_file)
|
||||
|
||||
def get_pids_of(self, process_name):
|
||||
""" Returns a list of PIDs of all processes with the specified name. """
|
||||
result = self.gem5_shell('ps | {} grep {}'.format(self.busybox, process_name),
|
||||
check_exit_code=False).strip()
|
||||
if result and 'not found' not in result and len(result.split('\n')) > 2:
|
||||
return [int(x.split()[1]) for x in result.split('\n')]
|
||||
else:
|
||||
return []
|
||||
|
||||
def find_prompt(self):
|
||||
prompt = r'\[PEXPECT\][\\\$\#]+ '
|
||||
synced = False
|
||||
while not synced:
|
||||
self.sckt.send('\n')
|
||||
i = self.sckt.expect([prompt, self.sckt.UNIQUE_PROMPT, r'[\$\#] '], timeout=self.delay)
|
||||
if i == 0:
|
||||
synced = True
|
||||
elif i == 1:
|
||||
prompt = self.sckt.UNIQUE_PROMPT
|
||||
synced = True
|
||||
else:
|
||||
prompt = re.sub(r'\$', r'\\\$', self.sckt.before.strip() + self.sckt.after.strip())
|
||||
prompt = re.sub(r'\#', r'\\\#', prompt)
|
||||
prompt = re.sub(r'\[', r'\[', prompt)
|
||||
prompt = re.sub(r'\]', r'\]', prompt)
|
||||
|
||||
self.sckt.PROMPT = prompt
|
||||
|
||||
def close(self):
|
||||
if self._logcat_poller:
|
||||
self._logcat_poller.stop()
|
||||
|
||||
def reset(self):
|
||||
self.logger.warn("Attempt to restart the gem5 device. This is not "
|
||||
"supported!")
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def push_file(self, source, dest, **kwargs):
|
||||
"""
|
||||
Push a file to the gem5 device using VirtIO
|
||||
|
||||
The file to push to the device is copied to the temporary directory on
|
||||
the host, before being copied within the simulation to the destination.
|
||||
Checks, in the form of 'ls' with error code checking, are performed to
|
||||
ensure that the file is copied to the destination.
|
||||
"""
|
||||
filename = os.path.basename(source)
|
||||
self.logger.debug("Pushing {} to device.".format(source))
|
||||
self.logger.debug("temp_dir: {}".format(self.temp_dir))
|
||||
self.logger.debug("dest: {}".format(dest))
|
||||
self.logger.debug("filename: {}".format(filename))
|
||||
|
||||
# We need to copy the file to copy to the temporary directory
|
||||
self.move_to_temp_dir(source)
|
||||
|
||||
# Back to the gem5 world
|
||||
self.gem5_shell("ls -al /mnt/obb/{}".format(filename))
|
||||
if self.busybox:
|
||||
self.gem5_shell("{} cp /mnt/obb/{} {}".format(self.busybox, filename, dest))
|
||||
else:
|
||||
self.gem5_shell("cat /mnt/obb/{} > {}".format(filename, dest))
|
||||
self.gem5_shell("sync")
|
||||
self.gem5_shell("ls -al {}".format(dest))
|
||||
self.gem5_shell("ls -al /mnt/obb/")
|
||||
self.logger.debug("Push complete.")
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def pull_file(self, source, dest, **kwargs):
|
||||
"""
|
||||
Pull a file from the gem5 device using m5 writefile
|
||||
|
||||
The file is copied to the local directory within the guest as the m5
|
||||
writefile command assumes that the file is local. The file is then
|
||||
written out to the host system using writefile, prior to being moved to
|
||||
the destination on the host.
|
||||
"""
|
||||
filename = os.path.basename(source)
|
||||
|
||||
self.logger.debug("pull_file {} {}".format(source, filename))
|
||||
# We don't check the exit code here because it is non-zero if the source
|
||||
# and destination are the same. The ls below will cause an error if the
|
||||
# file was not where we expected it to be.
|
||||
self.gem5_shell("{} cp {} {}".format(self.busybox, source, filename),
|
||||
check_exit_code=False)
|
||||
self.gem5_shell("sync")
|
||||
self.gem5_shell("ls -la {}".format(filename))
|
||||
self.logger.debug('Finished the copy in the simulator')
|
||||
self.gem5_util("writefile {}".format(filename))
|
||||
|
||||
if 'cpu' not in filename:
|
||||
while not os.path.exists(os.path.join(self.gem5outdir, filename)):
|
||||
time.sleep(1)
|
||||
|
||||
# Perform the local move
|
||||
shutil.move(os.path.join(self.gem5outdir, filename), dest)
|
||||
self.logger.debug("Pull complete.")
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def delete_file(self, filepath, **kwargs):
|
||||
""" Delete a file on the device """
|
||||
self._check_ready()
|
||||
self.gem5_shell("rm '{}'".format(filepath))
|
||||
|
||||
def file_exists(self, filepath):
|
||||
""" Check if a file exists """
|
||||
self._check_ready()
|
||||
output = self.gem5_shell('if [ -e \'{}\' ]; then echo 1; else echo 0; fi'.format(filepath))
|
||||
try:
|
||||
if int(output):
|
||||
return True
|
||||
except ValueError:
|
||||
# If we cannot process the output, assume that there is no file
|
||||
pass
|
||||
return False
|
||||
|
||||
def disconnect(self):
|
||||
"""
|
||||
Close and disconnect from the gem5 simulation. Additionally, we remove
|
||||
the temporary directory used to pass files into the simulation.
|
||||
"""
|
||||
self.logger.info("Gracefully terminating the gem5 simulation.")
|
||||
try:
|
||||
self.gem5_util("exit")
|
||||
self.gem5.wait()
|
||||
except EOF:
|
||||
pass
|
||||
self.logger.info("Removing the temporary directory")
|
||||
try:
|
||||
shutil.rmtree(self.temp_dir)
|
||||
except OSError:
|
||||
self.logger.warn("Failed to remove the temporary directory!")
|
||||
|
||||
# gem5 might be slow. Hence, we need to make the ping timeout very long.
|
||||
def ping(self):
|
||||
self.logger.debug("Pinging gem5 to see if it is still alive")
|
||||
self.gem5_shell('ls /', timeout=self.longdelay)
|
||||
|
||||
# Additional Android-specific methods.
|
||||
def forward_port(self, _): # pylint: disable=R0201
|
||||
raise DeviceError('we do not need forwarding')
|
||||
|
||||
# gem5 should dump out a framebuffer. We can use this if it exists. Failing
|
||||
# that, fall back to the parent class implementation.
|
||||
def capture_screen(self, filepath):
|
||||
file_list = os.listdir(self.gem5outdir)
|
||||
screen_caps = []
|
||||
for f in file_list:
|
||||
if '.bmp' in f:
|
||||
screen_caps.append(f)
|
||||
|
||||
if len(screen_caps) == 1:
|
||||
# Bail out if we do not have image, and resort to the slower, built
|
||||
# in method.
|
||||
try:
|
||||
import Image
|
||||
gem5_image = os.path.join(self.gem5outdir, screen_caps[0])
|
||||
temp_image = os.path.join(self.gem5outdir, "file.png")
|
||||
im = Image.open(gem5_image)
|
||||
im.save(temp_image, "PNG")
|
||||
shutil.copy(temp_image, filepath)
|
||||
os.remove(temp_image)
|
||||
self.logger.debug("capture_screen: using gem5 screencap")
|
||||
return True
|
||||
except (shutil.Error, ImportError, IOError):
|
||||
pass
|
||||
return False
|
||||
|
||||
# pylint: disable=W0613
|
||||
def execute(self, command, timeout=1000, check_exit_code=True, background=False,
|
||||
as_root=False, busybox=False, **kwargs):
|
||||
self._check_ready()
|
||||
if as_root and not self.is_rooted:
|
||||
raise DeviceError('Attempting to execute "{}" as root on unrooted device.'.format(command))
|
||||
if busybox:
|
||||
if not self.is_rooted:
|
||||
raise DeviceError('Attempting to execute "{}" with busybox. '.format(command) +
|
||||
'Busybox can only be deployed to rooted devices.')
|
||||
command = ' '.join([self.busybox, command])
|
||||
if background:
|
||||
self.logger.debug("Attempt to execute in background. Not supported "
|
||||
"in gem5, hence ignored.")
|
||||
return self.gem5_shell(command, as_root=as_root)
|
||||
|
||||
# Internal methods: do not use outside of the class.
|
||||
|
||||
def _check_ready(self):
|
||||
"""
|
||||
Check if the device is ready.
|
||||
|
||||
As this is gem5, we just assume that the device is ready once we have
|
||||
connected to the gem5 simulation, and updated the prompt.
|
||||
"""
|
||||
if not self._is_ready:
|
||||
raise DeviceError('Device not ready.')
|
||||
|
||||
def gem5_shell(self, command, as_root=False, timeout=None, check_exit_code=True, sync=True): # pylint: disable=R0912
|
||||
"""
|
||||
Execute a command in the gem5 shell
|
||||
|
||||
This wraps the telnet connection to gem5 and processes the raw output.
|
||||
|
||||
This method waits for the shell to return, and then will try and
|
||||
separate the output from the command from the command itself. If this
|
||||
fails, warn, but continue with the potentially wrong output.
|
||||
|
||||
The exit code is also checked by default, and non-zero exit codes will
|
||||
raise a DeviceError.
|
||||
"""
|
||||
conn = self.sckt
|
||||
if sync:
|
||||
self.sync_gem5_shell()
|
||||
|
||||
self.logger.debug("gem5_shell command: {}".format(command))
|
||||
|
||||
# Send the actual command
|
||||
conn.send("{}\n".format(command))
|
||||
|
||||
# Wait for the response. We just sit here and wait for the prompt to
|
||||
# appear, as gem5 might take a long time to provide the output. This
|
||||
# avoids timeout issues.
|
||||
command_index = -1
|
||||
while command_index == -1:
|
||||
if conn.prompt():
|
||||
output = re.sub(r' \r([^\n])', r'\1', conn.before)
|
||||
output = re.sub(r'[\b]', r'', output)
|
||||
# Deal with line wrapping
|
||||
output = re.sub(r'[\r].+?<', r'', output)
|
||||
command_index = output.find(command)
|
||||
|
||||
# If we have -1, then we cannot match the command, but the
|
||||
# prompt has returned. Hence, we have a bit of an issue. We
|
||||
# warn, and return the whole output.
|
||||
if command_index == -1:
|
||||
self.logger.warn("gem5_shell: Unable to match command in "
|
||||
"command output. Expect parsing errors!")
|
||||
command_index = 0
|
||||
|
||||
output = output[command_index + len(command):].strip()
|
||||
|
||||
# It is possible that gem5 will echo the command. Therefore, we need to
|
||||
# remove that too!
|
||||
command_index = output.find(command)
|
||||
if command_index != -1:
|
||||
output = output[command_index + len(command):].strip()
|
||||
|
||||
self.logger.debug("gem5_shell output: {}".format(output))
|
||||
|
||||
# We get a second prompt. Hence, we need to eat one to make sure that we
|
||||
# stay in sync. If we do not do this, we risk getting out of sync for
|
||||
# slower simulations.
|
||||
self.sckt.expect([self.sckt.UNIQUE_PROMPT, self.sckt.PROMPT], timeout=self.delay)
|
||||
|
||||
if check_exit_code:
|
||||
exit_code_text = self.gem5_shell('echo $?', as_root=as_root,
|
||||
timeout=timeout, check_exit_code=False,
|
||||
sync=False)
|
||||
try:
|
||||
exit_code = int(exit_code_text.split()[0])
|
||||
if exit_code:
|
||||
message = 'Got exit code {}\nfrom: {}\nOUTPUT: {}'
|
||||
raise DeviceError(message.format(exit_code, command, output))
|
||||
except (ValueError, IndexError):
|
||||
self.logger.warning('Could not get exit code for "{}",\ngot: "{}"'.format(command, exit_code_text))
|
||||
|
||||
return output
|
||||
|
||||
def gem5_util(self, command):
|
||||
""" Execute a gem5 utility command using the m5 binary on the device """
|
||||
self.gem5_shell('{} {}'.format(self.m5_path, command))
|
||||
|
||||
def sync_gem5_shell(self):
|
||||
"""
|
||||
Synchronise with the gem5 shell.
|
||||
|
||||
Write some unique text to the gem5 device to allow us to synchronise
|
||||
with the shell output. We actually get two prompts so we need to match
|
||||
both of these.
|
||||
"""
|
||||
self.logger.debug("Sending Sync")
|
||||
self.sckt.send("echo \*\*sync\*\*\n")
|
||||
self.sckt.expect(r"\*\*sync\*\*", timeout=self.delay)
|
||||
self.sckt.expect([self.sckt.UNIQUE_PROMPT, self.sckt.PROMPT], timeout=self.delay)
|
||||
self.sckt.expect([self.sckt.UNIQUE_PROMPT, self.sckt.PROMPT], timeout=self.delay)
|
||||
|
||||
def resize_shell(self):
|
||||
"""
|
||||
Resize the shell to avoid line wrapping issues.
|
||||
|
||||
"""
|
||||
# Try and avoid line wrapping as much as possible. Don't check the error
|
||||
# codes from these command because some of them WILL fail.
|
||||
self.gem5_shell('stty columns 1024', check_exit_code=False)
|
||||
self.gem5_shell('{} stty columns 1024'.format(self.busybox), check_exit_code=False)
|
||||
self.gem5_shell('stty cols 1024', check_exit_code=False)
|
||||
self.gem5_shell('{} stty cols 1024'.format(self.busybox), check_exit_code=False)
|
||||
self.gem5_shell('reset', check_exit_code=False)
|
||||
|
||||
def move_to_temp_dir(self, source):
|
||||
"""
|
||||
Move a file to the temporary directory on the host for copying to the
|
||||
gem5 device
|
||||
"""
|
||||
command = "cp {} {}".format(source, self.temp_dir)
|
||||
self.logger.debug("Local copy command: {}".format(command))
|
||||
subprocess.call(command.split())
|
||||
subprocess.call("sync".split())
|
||||
|
||||
def checkpoint_gem5(self, end_simulation=False):
|
||||
""" Checkpoint the gem5 simulation, storing all system state """
|
||||
self.logger.info("Taking a post-boot checkpoint")
|
||||
self.gem5_util("checkpoint")
|
||||
if end_simulation:
|
||||
self.disconnect()
|
||||
|
||||
def mount_virtio(self):
|
||||
"""
|
||||
Mount the VirtIO device in the simulated system.
|
||||
"""
|
||||
self.logger.info("Mounting VirtIO device in simulated system")
|
||||
|
||||
self.gem5_shell('mkdir -p /mnt/obb')
|
||||
|
||||
mount_command = "mount -t 9p -o trans=virtio,version=9p2000.L,aname={} gem5 /mnt/obb".format(self.temp_dir)
|
||||
self.gem5_shell(mount_command)
|
||||
|
||||
def deploy_m5(self, context, force=False):
|
||||
"""
|
||||
Deploys the m5 binary to the device and returns the path to the binary
|
||||
on the device.
|
||||
|
||||
:param force: by default, if the binary is already present on the
|
||||
device, it will not be deployed again. Setting force to
|
||||
``True`` overrides that behaviour and ensures that the
|
||||
binary is always copied. Defaults to ``False``.
|
||||
|
||||
:returns: The on-device path to the m5 binary.
|
||||
|
||||
"""
|
||||
on_device_executable = self.path.join(self.binaries_directory, 'm5')
|
||||
if not force and self.file_exists(on_device_executable):
|
||||
# We want to check the version of the binary. We cannot directly
|
||||
# check this because the m5 binary itself is unversioned. We also
|
||||
# need to make sure not to check the error code as "m5 --help"
|
||||
# returns a non-zero error code.
|
||||
output = self.gem5_shell('m5 --help', check_exit_code=False)
|
||||
if "writefile" in output:
|
||||
self.logger.debug("Using the m5 binary on the device...")
|
||||
self.m5_path = on_device_executable
|
||||
return on_device_executable
|
||||
else:
|
||||
self.logger.debug("m5 on device does not support writefile!")
|
||||
host_file = context.resolver.get(Executable(NO_ONE, self.abi, 'm5'))
|
||||
self.logger.info("Installing the m5 binary to the device...")
|
||||
self.m5_path = self.install(host_file)
|
||||
return self.m5_path
|
@@ -12,5 +12,3 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -40,6 +40,23 @@ reboot_policy = 'as_needed'
|
||||
# random: Randomisizes the order in which specs run. #
|
||||
execution_order = 'by_iteration'
|
||||
|
||||
|
||||
# This indicates when a job will be re-run.
|
||||
# Possible values:
|
||||
# OK: This iteration has completed and no errors have been detected
|
||||
# PARTIAL: One or more instruments have failed (the iteration may still be running).
|
||||
# FAILED: The workload itself has failed.
|
||||
# ABORTED: The user interupted the workload
|
||||
#
|
||||
# If set to an empty list, a job will not be re-run ever.
|
||||
retry_on_status = ['FAILED', 'PARTIAL']
|
||||
|
||||
# How many times a job will be re-run before giving up
|
||||
max_retries = 3
|
||||
|
||||
# If WA should delete its files from the device after the run is completed
|
||||
clean_up = False
|
||||
|
||||
####################################################################################################
|
||||
######################################### Device Settings ##########################################
|
||||
####################################################################################################
|
||||
@@ -120,6 +137,9 @@ instrumentation = [
|
||||
# Specifies how results will be processed and presented. #
|
||||
# #
|
||||
result_processors = [
|
||||
# Creates a status.txt that provides a summary status for the run
|
||||
'status',
|
||||
|
||||
# Creates a results.txt file for each iteration that lists all collected metrics
|
||||
# in "name = value (units)" format
|
||||
'standard',
|
||||
@@ -132,7 +152,7 @@ result_processors = [
|
||||
# all in the .csv format. Summary metrics are defined on per-worklod basis
|
||||
# are typically things like overall scores. The contents of summary.csv are
|
||||
# always a subset of the contents of results.csv (if it is generated).
|
||||
'summary_csv',
|
||||
#'summary_csv',
|
||||
|
||||
# Creates a results.csv that contains metrics for all iterations of all workloads
|
||||
# in the JSON format
|
||||
@@ -172,7 +192,7 @@ logging = {
|
||||
####################################################################################################
|
||||
#################################### Instruments Configuration #####################################
|
||||
####################################################################################################
|
||||
# Instrumention Configuration is related to specific insturment's settings. Some of the #
|
||||
# Instrumention Configuration is related to specific instrument's settings. Some of the #
|
||||
# instrumentations require specific settings in order for them to work. These settings are #
|
||||
# specified here. #
|
||||
# Note that these settings only take effect if the corresponding instrument is
|
||||
@@ -195,18 +215,6 @@ logging = {
|
||||
# The kinds of sensors hwmon instrument will look for
|
||||
#hwmon_sensors = ['energy', 'temp']
|
||||
|
||||
####################################################################################################
|
||||
##################################### streamline configuration #####################################
|
||||
|
||||
# The port number on which gatord will listen
|
||||
#port = 8080
|
||||
|
||||
# Enabling/disabling the run of 'streamline -analyze' on the captured data.
|
||||
#streamline_analyze = True
|
||||
|
||||
# Enabling/disabling the generation of a CSV report
|
||||
#streamline_report_csv = True
|
||||
|
||||
####################################################################################################
|
||||
###################################### trace-cmd configuration #####################################
|
||||
|
||||
@@ -217,10 +225,10 @@ logging = {
|
||||
####################################################################################################
|
||||
######################################### DAQ configuration ########################################
|
||||
|
||||
# The host address of the machine that runs the daq Server which the insturment communicates with
|
||||
# The host address of the machine that runs the daq Server which the instrument communicates with
|
||||
#daq_server_host = '10.1.17.56'
|
||||
|
||||
# The port number for daq Server in which daq insturment communicates with
|
||||
# The port number for daq Server in which daq instrument communicates with
|
||||
#daq_server_port = 56788
|
||||
|
||||
# The values of resistors 1 and 2 (in Ohms) across which the voltages are measured
|
||||
|
@@ -12,5 +12,3 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
|
@@ -16,13 +16,12 @@
|
||||
import os
|
||||
from copy import copy
|
||||
from collections import OrderedDict, defaultdict
|
||||
import yaml
|
||||
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.utils.misc import load_struct_from_yaml, LoadSyntaxError
|
||||
from wlauto.utils.types import counter, reset_counter
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
def get_aliased_param(d, aliases, default=None, pop=True):
|
||||
alias_map = [i for i, a in enumerate(aliases) if a in d]
|
||||
@@ -70,6 +69,7 @@ class AgendaWorkloadEntry(AgendaEntry):
|
||||
default=OrderedDict())
|
||||
self.instrumentation = kwargs.pop('instrumentation', [])
|
||||
self.flash = kwargs.pop('flash', OrderedDict())
|
||||
self.classifiers = kwargs.pop('classifiers', OrderedDict())
|
||||
if kwargs:
|
||||
raise ConfigError('Invalid entry(ies) in workload {}: {}'.format(self.id, ', '.join(kwargs.keys())))
|
||||
|
||||
@@ -96,6 +96,7 @@ class AgendaSectionEntry(AgendaEntry):
|
||||
default=OrderedDict())
|
||||
self.instrumentation = kwargs.pop('instrumentation', [])
|
||||
self.flash = kwargs.pop('flash', OrderedDict())
|
||||
self.classifiers = kwargs.pop('classifiers', OrderedDict())
|
||||
self.workloads = []
|
||||
for w in kwargs.pop('workloads', []):
|
||||
self.workloads.append(agenda.get_workload_entry(w))
|
||||
@@ -128,6 +129,7 @@ class AgendaGlobalEntry(AgendaEntry):
|
||||
default=OrderedDict())
|
||||
self.instrumentation = kwargs.pop('instrumentation', [])
|
||||
self.flash = kwargs.pop('flash', OrderedDict())
|
||||
self.classifiers = kwargs.pop('classifiers', OrderedDict())
|
||||
if kwargs:
|
||||
raise ConfigError('Invalid entries in global section: {}'.format(kwargs))
|
||||
|
||||
@@ -136,7 +138,7 @@ class Agenda(object):
|
||||
|
||||
def __init__(self, source=None):
|
||||
self.filepath = None
|
||||
self.config = None
|
||||
self.config = {}
|
||||
self.global_ = None
|
||||
self.sections = []
|
||||
self.workloads = []
|
||||
@@ -161,13 +163,22 @@ class Agenda(object):
|
||||
self._assign_id_if_needed(w, 'workload')
|
||||
return AgendaWorkloadEntry(**w)
|
||||
|
||||
def _load(self, source):
|
||||
raw = self._load_raw_from_source(source)
|
||||
def _load(self, source): # pylint: disable=too-many-branches
|
||||
try:
|
||||
raw = self._load_raw_from_source(source)
|
||||
except ValueError as e:
|
||||
name = getattr(source, 'name', '')
|
||||
raise ConfigError('Error parsing agenda {}: {}'.format(name, e))
|
||||
if not isinstance(raw, dict):
|
||||
message = '{} does not contain a valid agenda structure; top level must be a dict.'
|
||||
raise ConfigError(message.format(self.filepath))
|
||||
for k, v in raw.iteritems():
|
||||
if v is None:
|
||||
raise ConfigError('Empty "{}" entry in {}'.format(k, self.filepath))
|
||||
|
||||
if k == 'config':
|
||||
if not isinstance(v, dict):
|
||||
raise ConfigError('Invalid agenda: "config" entry must be a dict')
|
||||
self.config = v
|
||||
elif k == 'global':
|
||||
self.global_ = AgendaGlobalEntry(**v)
|
||||
@@ -237,7 +248,13 @@ def dict_representer(dumper, data):
|
||||
|
||||
|
||||
def dict_constructor(loader, node):
|
||||
return OrderedDict(loader.construct_pairs(node))
|
||||
pairs = loader.construct_pairs(node)
|
||||
seen_keys = set()
|
||||
for k, _ in pairs:
|
||||
if k in seen_keys:
|
||||
raise ValueError('Duplicate entry: {}'.format(k))
|
||||
seen_keys.add(k)
|
||||
return OrderedDict(pairs)
|
||||
|
||||
|
||||
yaml.add_representer(OrderedDict, dict_representer)
|
||||
|
@@ -16,13 +16,13 @@
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import imp
|
||||
import sys
|
||||
import re
|
||||
from collections import namedtuple, OrderedDict
|
||||
|
||||
from wlauto.exceptions import ConfigError
|
||||
from wlauto.utils.misc import merge_dicts, normalize, unique
|
||||
from wlauto.utils.misc import load_struct_from_yaml, load_struct_from_python, LoadSyntaxError
|
||||
from wlauto.utils.types import identifier
|
||||
|
||||
|
||||
@@ -76,7 +76,7 @@ class ConfigLoader(object):
|
||||
self._loaded = False
|
||||
self._config = {}
|
||||
self.config_count = 0
|
||||
self._loaded_files = []
|
||||
self.loaded_files = []
|
||||
self.environment_root = None
|
||||
self.output_directory = 'wa_output'
|
||||
self.reboot_after_each_iteration = True
|
||||
@@ -106,14 +106,22 @@ class ConfigLoader(object):
|
||||
self.update_from_file(source)
|
||||
|
||||
def update_from_file(self, source):
|
||||
ext = os.path.splitext(source)[1].lower() # pylint: disable=redefined-outer-name
|
||||
try:
|
||||
new_config = imp.load_source('config_{}'.format(self.config_count), source)
|
||||
except SyntaxError, e:
|
||||
message = 'Sytax error in config: {}'.format(str(e))
|
||||
raise ConfigError(message)
|
||||
self._config = merge_dicts(self._config, vars(new_config),
|
||||
list_duplicates='first', match_types=False, dict_type=OrderedDict)
|
||||
self._loaded_files.append(source)
|
||||
if ext in ['.py', '.pyo', '.pyc']:
|
||||
new_config = load_struct_from_python(source)
|
||||
elif ext == '.yaml':
|
||||
new_config = load_struct_from_yaml(source)
|
||||
else:
|
||||
raise ConfigError('Unknown config format: {}'.format(source))
|
||||
except (LoadSyntaxError, ValueError) as e:
|
||||
raise ConfigError('Invalid config "{}":\n\t{}'.format(source, e))
|
||||
|
||||
self._config = merge_dicts(self._config, new_config,
|
||||
list_duplicates='first',
|
||||
match_types=False,
|
||||
dict_type=OrderedDict)
|
||||
self.loaded_files.append(source)
|
||||
self._loaded = True
|
||||
|
||||
def update_from_dict(self, source):
|
||||
@@ -123,7 +131,7 @@ class ConfigLoader(object):
|
||||
self._loaded = True
|
||||
|
||||
def get_config_paths(self):
|
||||
return [lf.rstrip('c') for lf in self._loaded_files]
|
||||
return [lf.rstrip('c') for lf in self.loaded_files]
|
||||
|
||||
def _check_loaded(self):
|
||||
if not self._loaded:
|
||||
@@ -151,33 +159,44 @@ def init_environment(env_root, dep_dir, extension_paths, overwrite_existing=Fals
|
||||
for path in extension_paths:
|
||||
os.makedirs(path)
|
||||
|
||||
# If running with sudo on POSIX, change the ownership to the real user.
|
||||
real_user = os.getenv('SUDO_USER')
|
||||
if real_user:
|
||||
import pwd # done here as module won't import on win32
|
||||
user_entry = pwd.getpwnam(real_user)
|
||||
uid, gid = user_entry.pw_uid, user_entry.pw_gid
|
||||
os.chown(env_root, uid, gid)
|
||||
# why, oh why isn't there a recusive=True option for os.chown?
|
||||
for root, dirs, files in os.walk(env_root):
|
||||
for d in dirs:
|
||||
os.chown(os.path.join(root, d), uid, gid)
|
||||
for f in files: # pylint: disable=W0621
|
||||
os.chown(os.path.join(root, f), uid, gid)
|
||||
if os.getenv('USER') == 'root':
|
||||
# If running with sudo on POSIX, change the ownership to the real user.
|
||||
real_user = os.getenv('SUDO_USER')
|
||||
if real_user:
|
||||
import pwd # done here as module won't import on win32
|
||||
user_entry = pwd.getpwnam(real_user)
|
||||
uid, gid = user_entry.pw_uid, user_entry.pw_gid
|
||||
os.chown(env_root, uid, gid)
|
||||
# why, oh why isn't there a recusive=True option for os.chown?
|
||||
for root, dirs, files in os.walk(env_root):
|
||||
for d in dirs:
|
||||
os.chown(os.path.join(root, d), uid, gid)
|
||||
for f in files: # pylint: disable=W0621
|
||||
os.chown(os.path.join(root, f), uid, gid)
|
||||
|
||||
|
||||
_env_root = os.getenv('WA_USER_DIRECTORY', os.path.join(_user_home, '.workload_automation'))
|
||||
_dep_dir = os.path.join(_env_root, 'dependencies')
|
||||
_extension_paths = [os.path.join(_env_root, ext.default_path) for ext in _extensions]
|
||||
_extension_paths.extend(os.getenv('WA_EXTENSION_PATHS', '').split(os.pathsep))
|
||||
_env_var_paths = os.getenv('WA_EXTENSION_PATHS', '')
|
||||
if _env_var_paths:
|
||||
_extension_paths.extend(_env_var_paths.split(os.pathsep))
|
||||
|
||||
_env_configs = []
|
||||
for filename in ['config.py', 'config.yaml']:
|
||||
filepath = os.path.join(_env_root, filename)
|
||||
if os.path.isfile(filepath):
|
||||
_env_configs.append(filepath)
|
||||
|
||||
if not os.path.isdir(_env_root):
|
||||
init_environment(_env_root, _dep_dir, _extension_paths)
|
||||
elif not os.path.isfile(os.path.join(_env_root, 'config.py')):
|
||||
elif not _env_configs:
|
||||
filepath = os.path.join(_env_root, 'config.py')
|
||||
with open(os.path.join(_this_dir, '..', 'config_example.py')) as f:
|
||||
f_text = re.sub(r'""".*?"""', '', f.read(), 1, re.DOTALL)
|
||||
with open(os.path.join(_env_root, 'config.py'), 'w') as f:
|
||||
with open(filepath, 'w') as f:
|
||||
f.write(f_text)
|
||||
_env_configs.append(filepath)
|
||||
|
||||
settings = ConfigLoader()
|
||||
settings.environment_root = _env_root
|
||||
@@ -190,6 +209,5 @@ if os.path.isfile(_packages_file):
|
||||
with open(_packages_file) as fh:
|
||||
settings.extension_packages = unique(fh.read().split())
|
||||
|
||||
_env_config = os.path.join(settings.environment_root, 'config.py')
|
||||
settings.update(_env_config)
|
||||
|
||||
for config in _env_configs:
|
||||
settings.update(config)
|
||||
|
@@ -45,12 +45,12 @@ class Command(Extension):
|
||||
parser_params['formatter_class'] = self.formatter_class
|
||||
self.parser = subparsers.add_parser(self.name, **parser_params)
|
||||
init_argument_parser(self.parser) # propagate top-level options
|
||||
self.initialize()
|
||||
self.initialize(None)
|
||||
|
||||
def initialize(self):
|
||||
def initialize(self, context):
|
||||
"""
|
||||
Perform command-specific initialisation (e.g. adding command-specific options to the command's
|
||||
parser).
|
||||
parser). ``context`` is always ``None``.
|
||||
|
||||
"""
|
||||
pass
|
||||
|
@@ -77,6 +77,7 @@ class WorkloadRunSpec(object):
|
||||
runtime_parameters=None,
|
||||
instrumentation=None,
|
||||
flash=None,
|
||||
classifiers=None,
|
||||
): # pylint: disable=W0622
|
||||
self.id = id
|
||||
self.number_of_iterations = number_of_iterations
|
||||
@@ -88,6 +89,7 @@ class WorkloadRunSpec(object):
|
||||
self.workload_parameters = workload_parameters or OrderedDict()
|
||||
self.instrumentation = instrumentation or []
|
||||
self.flash = flash or OrderedDict()
|
||||
self.classifiers = classifiers or OrderedDict()
|
||||
self._workload = None
|
||||
self._section = None
|
||||
self.enabled = True
|
||||
@@ -96,7 +98,7 @@ class WorkloadRunSpec(object):
|
||||
if param in ['id', 'section_id', 'number_of_iterations', 'workload_name', 'label']:
|
||||
if value is not None:
|
||||
setattr(self, param, value)
|
||||
elif param in ['boot_parameters', 'runtime_parameters', 'workload_parameters', 'flash']:
|
||||
elif param in ['boot_parameters', 'runtime_parameters', 'workload_parameters', 'flash', 'classifiers']:
|
||||
setattr(self, param, merge_dicts(getattr(self, param), value, list_duplicates='last',
|
||||
dict_type=OrderedDict, should_normalize=False))
|
||||
elif param in ['instrumentation']:
|
||||
@@ -155,6 +157,23 @@ class WorkloadRunSpec(object):
|
||||
del d['_section']
|
||||
return d
|
||||
|
||||
def copy(self):
|
||||
other = WorkloadRunSpec()
|
||||
other.id = self.id
|
||||
other.number_of_iterations = self.number_of_iterations
|
||||
other.workload_name = self.workload_name
|
||||
other.label = self.label
|
||||
other.section_id = self.section_id
|
||||
other.boot_parameters = copy(self.boot_parameters)
|
||||
other.runtime_parameters = copy(self.runtime_parameters)
|
||||
other.workload_parameters = copy(self.workload_parameters)
|
||||
other.instrumentation = copy(self.instrumentation)
|
||||
other.flash = copy(self.flash)
|
||||
other.classifiers = copy(self.classifiers)
|
||||
other._section = self._section # pylint: disable=protected-access
|
||||
other.enabled = self.enabled
|
||||
return other
|
||||
|
||||
def __str__(self):
|
||||
return '{} {}'.format(self.id, self.label)
|
||||
|
||||
@@ -292,6 +311,12 @@ def _combine_ids(*args):
|
||||
return '_'.join(args)
|
||||
|
||||
|
||||
class status_list(list):
|
||||
|
||||
def append(self, item):
|
||||
list.append(self, str(item).upper())
|
||||
|
||||
|
||||
class RunConfiguration(object):
|
||||
"""
|
||||
Loads and maintains the unified configuration for this run. This includes configuration
|
||||
@@ -400,7 +425,7 @@ class RunConfiguration(object):
|
||||
is validated (to make sure there are no missing settings, etc).
|
||||
- Extensions are loaded through the run config object, which instantiates
|
||||
them with appropriate parameters based on the "raw" config collected earlier. When an
|
||||
Extension is instantiated in such a way, it's config is "officially" added to run configuration
|
||||
Extension is instantiated in such a way, its config is "officially" added to run configuration
|
||||
tracked by the run config object. Raw config is discarded at the end of the run, so
|
||||
that any config that wasn't loaded in this way is not recoded (as it was not actually used).
|
||||
- Extension parameters a validated individually (for type, value ranges, etc) as they are
|
||||
@@ -454,6 +479,9 @@ class RunConfiguration(object):
|
||||
RunConfigurationItem('reboot_policy', 'scalar', 'replace'),
|
||||
RunConfigurationItem('device', 'scalar', 'replace'),
|
||||
RunConfigurationItem('flashing_config', 'dict', 'replace'),
|
||||
RunConfigurationItem('retry_on_status', 'list', 'replace'),
|
||||
RunConfigurationItem('max_retries', 'scalar', 'replace'),
|
||||
RunConfigurationItem('clean_up', 'scalar', 'replace'),
|
||||
]
|
||||
|
||||
# Configuration specified for each workload spec. "workload_parameters"
|
||||
@@ -468,11 +496,12 @@ class RunConfiguration(object):
|
||||
RunConfigurationItem('runtime_parameters', 'dict', 'merge'),
|
||||
RunConfigurationItem('instrumentation', 'list', 'merge'),
|
||||
RunConfigurationItem('flash', 'dict', 'merge'),
|
||||
RunConfigurationItem('classifiers', 'dict', 'merge'),
|
||||
]
|
||||
|
||||
# List of names that may be present in configuration (and it is valid for
|
||||
# them to be there) but are not handled buy RunConfiguration.
|
||||
ignore_names = ['logging']
|
||||
ignore_names = ['logging', 'remote_assets_mount_point']
|
||||
|
||||
def get_reboot_policy(self):
|
||||
if not self._reboot_policy:
|
||||
@@ -507,6 +536,8 @@ class RunConfiguration(object):
|
||||
self.workload_specs = []
|
||||
self.flashing_config = {}
|
||||
self.other_config = {} # keeps track of used config for extensions other than of the four main kinds.
|
||||
self.retry_on_status = status_list(['FAILED', 'PARTIAL'])
|
||||
self.max_retries = 3
|
||||
self._used_config_items = []
|
||||
self._global_instrumentation = []
|
||||
self._reboot_policy = None
|
||||
@@ -639,7 +670,7 @@ class RunConfiguration(object):
|
||||
for param, ext in ga.iteritems():
|
||||
for name in [ext.name] + [a.name for a in ext.aliases]:
|
||||
self._load_default_config_if_necessary(name)
|
||||
self._raw_config[name][param.name] = value
|
||||
self._raw_config[identifier(name)][param.name] = value
|
||||
|
||||
def _set_run_config_item(self, name, value):
|
||||
item = self._general_config_map[name]
|
||||
@@ -653,12 +684,12 @@ class RunConfiguration(object):
|
||||
def _set_raw_dict(self, name, value, default_config=None):
|
||||
existing_config = self._raw_config.get(name, default_config or {})
|
||||
new_config = _merge_config_dicts(existing_config, value)
|
||||
self._raw_config[name] = new_config
|
||||
self._raw_config[identifier(name)] = new_config
|
||||
|
||||
def _set_raw_list(self, name, value):
|
||||
old_value = self._raw_config.get(name, [])
|
||||
new_value = merge_lists(old_value, value, duplicates='last')
|
||||
self._raw_config[name] = new_value
|
||||
self._raw_config[identifier(name)] = new_value
|
||||
|
||||
def _finalize_config_list(self, attr_name):
|
||||
"""Note: the name is somewhat misleading. This finalizes a list
|
||||
@@ -668,18 +699,21 @@ class RunConfiguration(object):
|
||||
raw_list = self._raw_config.get(attr_name, [])
|
||||
for extname in raw_list:
|
||||
default_config = self.ext_loader.get_default_config(extname)
|
||||
ext_config[extname] = self._raw_config.get(extname, default_config)
|
||||
ext_config[extname] = self._raw_config.get(identifier(extname), default_config)
|
||||
list_name = '_global_{}'.format(attr_name)
|
||||
setattr(self, list_name, raw_list)
|
||||
global_list = self._raw_config.get(list_name, [])
|
||||
setattr(self, list_name, global_list)
|
||||
setattr(self, attr_name, ext_config)
|
||||
|
||||
def _finalize_device_config(self):
|
||||
self._load_default_config_if_necessary(self.device)
|
||||
config = _merge_config_dicts(self._raw_config.get(self.device),
|
||||
self._raw_config.get('device_config', {}))
|
||||
config = _merge_config_dicts(self._raw_config.get(self.device, {}),
|
||||
self._raw_config.get('device_config', {}),
|
||||
list_duplicates='all')
|
||||
self.device_config = config
|
||||
|
||||
def _load_default_config_if_necessary(self, name):
|
||||
name = identifier(name)
|
||||
if name not in self._raw_config:
|
||||
self._raw_config[name] = self.ext_loader.get_default_config(name)
|
||||
|
||||
@@ -724,7 +758,7 @@ class RunConfiguration(object):
|
||||
if spec.match_selectors(selectors):
|
||||
instrumentation_config = self._raw_config['instrumentation']
|
||||
for instname in spec.instrumentation:
|
||||
if instname not in instrumentation_config:
|
||||
if instname not in instrumentation_config and not instname.startswith('~'):
|
||||
instrumentation_config.append(instname)
|
||||
self.workload_specs.append(spec)
|
||||
|
||||
|
@@ -35,8 +35,9 @@ from collections import OrderedDict
|
||||
from contextlib import contextmanager
|
||||
|
||||
from wlauto.core.extension import Extension, ExtensionMeta, AttributeCollection, Parameter
|
||||
from wlauto.core.extension_loader import ExtensionLoader
|
||||
from wlauto.exceptions import DeviceError, ConfigError
|
||||
from wlauto.utils.types import list_of_strings, list_of_integers
|
||||
from wlauto.utils.types import list_of_integers, list_of, caseless_string
|
||||
|
||||
|
||||
__all__ = ['RuntimeParameter', 'CoreParameter', 'Device', 'DeviceMeta']
|
||||
@@ -93,10 +94,34 @@ class CoreParameter(RuntimeParameter):
|
||||
return params
|
||||
|
||||
|
||||
class DynamicModuleSpec(dict):
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.keys()[0]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
dict.__init__(self)
|
||||
if args:
|
||||
if len(args) > 1:
|
||||
raise ValueError(args)
|
||||
value = args[0]
|
||||
else:
|
||||
value = kwargs
|
||||
if isinstance(value, basestring):
|
||||
self[value] = {}
|
||||
elif isinstance(value, dict) and len(value) == 1:
|
||||
for k, v in value.iteritems():
|
||||
self[k] = v
|
||||
else:
|
||||
raise ValueError(value)
|
||||
|
||||
|
||||
class DeviceMeta(ExtensionMeta):
|
||||
|
||||
to_propagate = ExtensionMeta.to_propagate + [
|
||||
('runtime_parameters', RuntimeParameter, AttributeCollection),
|
||||
('dynamic_modules', DynamicModuleSpec, AttributeCollection),
|
||||
]
|
||||
|
||||
|
||||
@@ -137,7 +162,7 @@ class Device(Extension):
|
||||
__metaclass__ = DeviceMeta
|
||||
|
||||
parameters = [
|
||||
Parameter('core_names', kind=list_of_strings, mandatory=True, default=None,
|
||||
Parameter('core_names', kind=list_of(caseless_string), mandatory=True, default=None,
|
||||
description="""
|
||||
This is a list of all cpu cores on the device with each
|
||||
element being the core type, e.g. ``['a7', 'a7', 'a15']``. The
|
||||
@@ -149,12 +174,17 @@ class Device(Extension):
|
||||
description="""
|
||||
This is a list indicating the cluster affinity of the CPU cores,
|
||||
each element correponding to the cluster ID of the core coresponding
|
||||
to it's index. E.g. ``[0, 0, 1]`` indicates that cpu0 and cpu1 are on
|
||||
cluster 0, while cpu2 is on cluster 1.
|
||||
to its index. E.g. ``[0, 0, 1]`` indicates that cpu0 and cpu1 are on
|
||||
cluster 0, while cpu2 is on cluster 1. If this is not specified, this
|
||||
will be inferred from ``core_names`` if possible (assuming all cores with
|
||||
the same name are on the same cluster).
|
||||
"""),
|
||||
]
|
||||
|
||||
runtime_parameters = []
|
||||
# dynamic modules are loaded or not based on whether the device supports
|
||||
# them (established at runtime by module probling the device).
|
||||
dynamic_modules = []
|
||||
|
||||
# These must be overwritten by subclasses.
|
||||
name = None
|
||||
@@ -177,6 +207,36 @@ class Device(Extension):
|
||||
except IOError:
|
||||
raise DeviceError('Unsupported path module: {}'.format(self.path_module))
|
||||
|
||||
def validate(self):
|
||||
# pylint: disable=access-member-before-definition,attribute-defined-outside-init
|
||||
if self.core_names and not self.core_clusters:
|
||||
self.core_clusters = []
|
||||
clusters = []
|
||||
for cn in self.core_names:
|
||||
if cn not in clusters:
|
||||
clusters.append(cn)
|
||||
self.core_clusters.append(clusters.index(cn))
|
||||
if len(self.core_names) != len(self.core_clusters):
|
||||
raise ConfigError('core_names and core_clusters are of different lengths.')
|
||||
|
||||
def initialize(self, context):
|
||||
"""
|
||||
Initialization that is performed at the begining of the run (after the device has
|
||||
been connecte).
|
||||
|
||||
"""
|
||||
loader = ExtensionLoader()
|
||||
for module_spec in self.dynamic_modules:
|
||||
module = self._load_module(loader, module_spec)
|
||||
if not hasattr(module, 'probe'):
|
||||
message = 'Module {} does not have "probe" attribute; cannot be loaded dynamically'
|
||||
raise ValueError(message.format(module.name))
|
||||
if module.probe(self):
|
||||
self.logger.debug('Installing module "{}"'.format(module.name))
|
||||
self._install_module(module)
|
||||
else:
|
||||
self.logger.debug('Module "{}" is not supported by the device'.format(module.name))
|
||||
|
||||
def reset(self):
|
||||
"""
|
||||
Initiate rebooting of the device.
|
||||
@@ -208,35 +268,6 @@ class Device(Extension):
|
||||
""" Close the established connection to the device. """
|
||||
raise NotImplementedError()
|
||||
|
||||
def initialize(self, context, *args, **kwargs):
|
||||
"""
|
||||
Default implementation just calls through to init(). May be overriden by specialised
|
||||
abstract sub-cleasses to implent platform-specific intialization without requiring
|
||||
concrete implementations to explicitly invoke parent's init().
|
||||
|
||||
Added in version 2.1.3.
|
||||
|
||||
"""
|
||||
self.init(context, *args, **kwargs)
|
||||
|
||||
def init(self, context, *args, **kwargs):
|
||||
"""
|
||||
Initialize the device. This method *must* be called after a device reboot before
|
||||
any other commands can be issued, however it may also be called without rebooting.
|
||||
|
||||
It is up to device-specific implementations to identify what initialisation needs
|
||||
to be preformed on a particular invocation. Bear in mind that no assumptions can be
|
||||
made about the state of the device prior to the initiation of workload execution,
|
||||
so full initialisation must be performed at least once, even if no reboot has occurred.
|
||||
After that, the device-specific implementation may choose to skip initialization if
|
||||
the device has not been rebooted; it is up to the implementation to keep track of
|
||||
that, however.
|
||||
|
||||
All arguments are device-specific (see the documentation for the your device).
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
def ping(self):
|
||||
"""
|
||||
This must return successfully if the device is able to receive commands, or must
|
||||
@@ -269,14 +300,15 @@ class Device(Extension):
|
||||
runtime_parameters = self._expand_runtime_parameters()
|
||||
rtp_map = {rtp.name.lower(): rtp for rtp in runtime_parameters}
|
||||
|
||||
params = OrderedDict((k.lower(), v) for k, v in params.iteritems())
|
||||
params = OrderedDict((k.lower(), v) for k, v in params.iteritems() if v is not None)
|
||||
|
||||
expected_keys = rtp_map.keys()
|
||||
if not set(params.keys()) <= set(expected_keys):
|
||||
if not set(params.keys()).issubset(set(expected_keys)):
|
||||
unknown_params = list(set(params.keys()).difference(set(expected_keys)))
|
||||
raise ConfigError('Unknown runtime parameter(s): {}'.format(unknown_params))
|
||||
|
||||
for param in params:
|
||||
self.logger.debug('Setting runtime parameter "{}"'.format(param))
|
||||
rtp = rtp_map[param]
|
||||
setter = getattr(self, rtp.setter)
|
||||
args = dict(rtp.setter_args.items() + [(rtp.value_name, params[rtp.name.lower()])])
|
||||
@@ -394,6 +426,13 @@ class Device(Extension):
|
||||
"""
|
||||
pass
|
||||
|
||||
def is_network_connected(self):
|
||||
"""
|
||||
Checks if the device is connected to the internet
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def __str__(self):
|
||||
return 'Device<{}>'.format(self.name)
|
||||
|
||||
@@ -415,4 +454,3 @@ class Device(Extension):
|
||||
except Exception as e:
|
||||
self.ping()
|
||||
raise e
|
||||
|
||||
|
@@ -17,17 +17,19 @@
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import warnings
|
||||
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.core.extension_loader import ExtensionLoader
|
||||
from wlauto.exceptions import WAError
|
||||
from wlauto.exceptions import WAError, ConfigError
|
||||
from wlauto.utils.misc import get_traceback
|
||||
from wlauto.utils.log import init_logging
|
||||
from wlauto.utils.cli import init_argument_parser
|
||||
from wlauto.utils.doc import format_body
|
||||
|
||||
|
||||
import warnings
|
||||
warnings.filterwarnings(action='ignore', category=UserWarning, module='zope')
|
||||
|
||||
|
||||
@@ -55,6 +57,8 @@ def main():
|
||||
settings.verbosity = args.verbose
|
||||
settings.debug = args.debug
|
||||
if args.config:
|
||||
if not os.path.exists(args.config):
|
||||
raise ConfigError("Config file {} not found".format(args.config))
|
||||
settings.update(args.config)
|
||||
init_logging(settings.verbosity)
|
||||
|
||||
@@ -64,12 +68,27 @@ def main():
|
||||
except KeyboardInterrupt:
|
||||
logging.info('Got CTRL-C. Aborting.')
|
||||
sys.exit(3)
|
||||
except WAError, e:
|
||||
except WAError as e:
|
||||
logging.critical(e)
|
||||
sys.exit(1)
|
||||
except Exception, e: # pylint: disable=broad-except
|
||||
except subprocess.CalledProcessError as e:
|
||||
tb = get_traceback()
|
||||
logging.critical(tb)
|
||||
command = e.cmd
|
||||
if e.args:
|
||||
command = '{} {}'.format(command, ' '.join(e.args))
|
||||
message = 'Command \'{}\' returned non-zero exit status {}\nOUTPUT:\n{}\n'
|
||||
logging.critical(message.format(command, e.returncode, e.output))
|
||||
sys.exit(2)
|
||||
except SyntaxError as e:
|
||||
tb = get_traceback()
|
||||
logging.critical(tb)
|
||||
message = 'Syntax Error in {}, line {}, offset {}:'
|
||||
logging.critical(message.format(e.filename, e.lineno, e.offset))
|
||||
logging.critical('\t{}'.format(e.msg))
|
||||
sys.exit(2)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
tb = get_traceback()
|
||||
logging.critical(tb)
|
||||
logging.critical('{}({})'.format(e.__class__.__name__, e))
|
||||
sys.exit(2)
|
||||
|
||||
|
@@ -56,7 +56,8 @@ from wlauto.core.extension_loader import ExtensionLoader
|
||||
from wlauto.core.resolver import ResourceResolver
|
||||
from wlauto.core.result import ResultManager, IterationResult, RunResult
|
||||
from wlauto.exceptions import (WAError, ConfigError, TimeoutError, InstrumentError,
|
||||
DeviceError, DeviceNotRespondingError)
|
||||
DeviceError, DeviceNotRespondingError, ResourceError,
|
||||
HostError)
|
||||
from wlauto.utils.misc import ensure_directory_exists as _d, get_traceback, merge_dicts, format_duration
|
||||
|
||||
|
||||
@@ -72,7 +73,7 @@ REBOOT_DELAY = 3
|
||||
|
||||
class RunInfo(object):
|
||||
"""
|
||||
Information about the current run, such as it's unique ID, run
|
||||
Information about the current run, such as its unique ID, run
|
||||
time, etc.
|
||||
|
||||
"""
|
||||
@@ -85,7 +86,8 @@ class RunInfo(object):
|
||||
self.duration = None
|
||||
self.project = config.project
|
||||
self.project_stage = config.project_stage
|
||||
self.run_name = config.run_name
|
||||
self.run_name = config.run_name or "{}_{}".format(os.path.split(settings.output_directory)[1],
|
||||
datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S"))
|
||||
self.notes = None
|
||||
self.device_properties = {}
|
||||
|
||||
@@ -123,6 +125,12 @@ class ExecutionContext(object):
|
||||
else:
|
||||
return None
|
||||
|
||||
@property
|
||||
def job_status(self):
|
||||
if not self.current_job:
|
||||
return None
|
||||
return self.current_job.result.status
|
||||
|
||||
@property
|
||||
def workload(self):
|
||||
return getattr(self.spec, 'workload', None)
|
||||
@@ -133,7 +141,7 @@ class ExecutionContext(object):
|
||||
|
||||
@property
|
||||
def result(self):
|
||||
return getattr(self.current_job, 'result', None)
|
||||
return getattr(self.current_job, 'result', self.run_result)
|
||||
|
||||
def __init__(self, device, config):
|
||||
self.device = device
|
||||
@@ -151,6 +159,7 @@ class ExecutionContext(object):
|
||||
self.run_artifacts = copy(self.default_run_artifacts)
|
||||
self.job_iteration_counts = defaultdict(int)
|
||||
self.aborted = False
|
||||
self.runner = None
|
||||
if settings.agenda:
|
||||
self.run_artifacts.append(Artifact('agenda',
|
||||
os.path.join(self.host_working_directory,
|
||||
@@ -158,10 +167,12 @@ class ExecutionContext(object):
|
||||
'meta',
|
||||
mandatory=True,
|
||||
description='Agenda for this run.'))
|
||||
for i in xrange(1, settings.config_count + 1):
|
||||
self.run_artifacts.append(Artifact('config_{}'.format(i),
|
||||
os.path.join(self.host_working_directory,
|
||||
'config_{}.py'.format(i)),
|
||||
for i, filepath in enumerate(settings.loaded_files, 1):
|
||||
name = 'config_{}'.format(i)
|
||||
path = os.path.join(self.host_working_directory,
|
||||
name + os.path.splitext(filepath)[1])
|
||||
self.run_artifacts.append(Artifact(name,
|
||||
path,
|
||||
kind='meta',
|
||||
mandatory=True,
|
||||
description='Config file used for the run.'))
|
||||
@@ -172,17 +183,18 @@ class ExecutionContext(object):
|
||||
self.output_directory = self.run_output_directory
|
||||
self.resolver = ResourceResolver(self.config)
|
||||
self.run_info = RunInfo(self.config)
|
||||
self.run_result = RunResult(self.run_info)
|
||||
self.run_result = RunResult(self.run_info, self.run_output_directory)
|
||||
|
||||
def next_job(self, job):
|
||||
"""Invoked by the runner when starting a new iteration of workload execution."""
|
||||
self.current_job = job
|
||||
self.job_iteration_counts[self.spec.id] += 1
|
||||
self.current_job.result.iteration = self.current_iteration
|
||||
if not self.aborted:
|
||||
outdir_name = '_'.join(map(str, [self.spec.label, self.spec.id, self.current_iteration]))
|
||||
self.output_directory = _d(os.path.join(self.run_output_directory, outdir_name))
|
||||
self.iteration_artifacts = [wa for wa in self.workload.artifacts]
|
||||
self.current_job.result.iteration = self.current_iteration
|
||||
self.current_job.result.output_directory = self.output_directory
|
||||
|
||||
def end_job(self):
|
||||
if self.current_job.result.status == IterationResult.ABORTED:
|
||||
@@ -190,6 +202,12 @@ class ExecutionContext(object):
|
||||
self.current_job = None
|
||||
self.output_directory = self.run_output_directory
|
||||
|
||||
def add_metric(self, *args, **kwargs):
|
||||
self.result.add_metric(*args, **kwargs)
|
||||
|
||||
def add_classifiers(self, **kwargs):
|
||||
self.result.classifiers.update(kwargs)
|
||||
|
||||
def add_artifact(self, name, path, kind, *args, **kwargs):
|
||||
if self.current_job is None:
|
||||
self.add_run_artifact(name, path, kind, *args, **kwargs)
|
||||
@@ -327,6 +345,11 @@ class Executor(object):
|
||||
runner = self._get_runner(result_manager)
|
||||
runner.init_queue(self.config.workload_specs)
|
||||
runner.run()
|
||||
|
||||
if getattr(self.config, "clean_up", False):
|
||||
self.logger.info('Clearing WA files from device')
|
||||
self.device.delete_file(self.device.binaries_directory)
|
||||
self.device.delete_file(self.device.working_directory)
|
||||
self.execute_postamble()
|
||||
|
||||
def execute_postamble(self):
|
||||
@@ -388,8 +411,9 @@ class RunnerJob(object):
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, spec):
|
||||
def __init__(self, spec, retry=0):
|
||||
self.spec = spec
|
||||
self.retry = retry
|
||||
self.iteration = None
|
||||
self.result = IterationResult(self.spec)
|
||||
|
||||
@@ -410,6 +434,10 @@ class Runner(object):
|
||||
"""Internal runner error."""
|
||||
pass
|
||||
|
||||
@property
|
||||
def config(self):
|
||||
return self.context.config
|
||||
|
||||
@property
|
||||
def current_job(self):
|
||||
if self.job_queue:
|
||||
@@ -477,7 +505,7 @@ class Runner(object):
|
||||
if self.context.reboot_policy.can_reboot and self.device.can('reset_power'):
|
||||
self.logger.info('Attempting to hard-reset the device...')
|
||||
try:
|
||||
self.device.hard_reset()
|
||||
self.device.boot(hard=True)
|
||||
self.device.connect()
|
||||
except DeviceError: # hard_boot not implemented for the device.
|
||||
raise e
|
||||
@@ -510,17 +538,16 @@ class Runner(object):
|
||||
self._send(signal.RUN_END)
|
||||
|
||||
def _initialize_run(self):
|
||||
self.context.runner = self
|
||||
self.context.run_info.start_time = datetime.utcnow()
|
||||
if self.context.reboot_policy.perform_initial_boot:
|
||||
self.logger.info('\tBooting device')
|
||||
with self._signal_wrap('INITIAL_BOOT'):
|
||||
self._reboot_device()
|
||||
else:
|
||||
self.logger.info('Connecting to device')
|
||||
self.device.connect()
|
||||
self._connect_to_device()
|
||||
self.logger.info('Initializing device')
|
||||
self.device.initialize(self.context)
|
||||
|
||||
self.logger.info('Initializing workloads')
|
||||
for workload_spec in self.context.config.workload_specs:
|
||||
workload_spec.workload.initialize(self.context)
|
||||
|
||||
props = self.device.get_properties(self.context)
|
||||
self.context.run_info.device_properties = props
|
||||
self.result_manager.initialize(self.context)
|
||||
@@ -529,6 +556,25 @@ class Runner(object):
|
||||
if instrumentation.check_failures():
|
||||
raise InstrumentError('Detected failure(s) during instrumentation initialization.')
|
||||
|
||||
def _connect_to_device(self):
|
||||
if self.context.reboot_policy.perform_initial_boot:
|
||||
try:
|
||||
self.device.connect()
|
||||
except DeviceError: # device may be offline
|
||||
if self.device.can('reset_power'):
|
||||
with self._signal_wrap('INITIAL_BOOT'):
|
||||
self.device.boot(hard=True)
|
||||
else:
|
||||
raise DeviceError('Cannot connect to device for initial reboot; '
|
||||
'and device does not support hard reset.')
|
||||
else: # successfully connected
|
||||
self.logger.info('\tBooting device')
|
||||
with self._signal_wrap('INITIAL_BOOT'):
|
||||
self._reboot_device()
|
||||
else:
|
||||
self.logger.info('Connecting to device')
|
||||
self.device.connect()
|
||||
|
||||
def _init_job(self):
|
||||
self.current_job.result.status = IterationResult.RUNNING
|
||||
self.context.next_job(self.current_job)
|
||||
@@ -592,11 +638,23 @@ class Runner(object):
|
||||
|
||||
def _finalize_job(self):
|
||||
self.context.run_result.iteration_results.append(self.current_job.result)
|
||||
self.job_queue[0].iteration = self.context.current_iteration
|
||||
self.completed_jobs.append(self.job_queue.pop(0))
|
||||
job = self.job_queue.pop(0)
|
||||
job.iteration = self.context.current_iteration
|
||||
if job.result.status in self.config.retry_on_status:
|
||||
if job.retry >= self.config.max_retries:
|
||||
self.logger.error('Exceeded maxium number of retries. Abandoning job.')
|
||||
else:
|
||||
self.logger.info('Job status was {}. Retrying...'.format(job.result.status))
|
||||
retry_job = RunnerJob(job.spec, job.retry + 1)
|
||||
self.job_queue.insert(0, retry_job)
|
||||
self.completed_jobs.append(job)
|
||||
self.context.end_job()
|
||||
|
||||
def _finalize_run(self):
|
||||
self.logger.info('Finalizing workloads')
|
||||
for workload_spec in self.context.config.workload_specs:
|
||||
workload_spec.workload.finalize(self.context)
|
||||
|
||||
self.logger.info('Finalizing.')
|
||||
self._send(signal.RUN_FIN)
|
||||
|
||||
@@ -679,6 +737,13 @@ class Runner(object):
|
||||
filepath = os.path.join(settings.output_directory, filename)
|
||||
self.device.capture_screen(filepath)
|
||||
|
||||
def _take_uiautomator_dump(self, filename):
|
||||
if self.context.output_directory:
|
||||
filepath = os.path.join(self.context.output_directory, filename)
|
||||
else:
|
||||
filepath = os.path.join(settings.output_directory, filename)
|
||||
self.device.capture_ui_hierarchy(filepath)
|
||||
|
||||
@contextmanager
|
||||
def _handle_errors(self, action, on_error_status=IterationResult.FAILED):
|
||||
try:
|
||||
@@ -692,15 +757,21 @@ class Runner(object):
|
||||
if self.current_job:
|
||||
self.current_job.result.status = on_error_status
|
||||
self.current_job.result.add_event(str(we))
|
||||
try:
|
||||
self._take_screenshot('error.png')
|
||||
except Exception, e: # pylint: disable=W0703
|
||||
# We're already in error state, so the fact that taking a
|
||||
# screenshot failed is not surprising...
|
||||
pass
|
||||
|
||||
# There is no point in taking a screenshot ect if the issue is not
|
||||
# with the device but with the host or a missing resource
|
||||
if not (isinstance(we, ResourceError) or isinstance(we, HostError)):
|
||||
try:
|
||||
self._take_screenshot('error.png')
|
||||
if self.device.platform == 'android':
|
||||
self._take_uiautomator_dump('error.uix')
|
||||
except Exception, e: # pylint: disable=W0703
|
||||
# We're already in error state, so the fact that taking a
|
||||
# screenshot failed is not surprising...
|
||||
pass
|
||||
if action:
|
||||
action = action[0].lower() + action[1:]
|
||||
self.logger.error('Error while {}:\n\t{}'.format(action, we))
|
||||
self.logger.error('Error while {}:\n\t{}'.format(action, str(we).replace("\n", "\n\t")))
|
||||
except Exception, e: # pylint: disable=W0703
|
||||
error_text = '{}("{}")'.format(e.__class__.__name__, e)
|
||||
if self.current_job:
|
||||
|
@@ -24,7 +24,7 @@ from collections import OrderedDict
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.exceptions import ValidationError, ConfigError
|
||||
from wlauto.utils.misc import isiterable, ensure_directory_exists as _d, get_article
|
||||
from wlauto.utils.types import identifier
|
||||
from wlauto.utils.types import identifier, integer, boolean
|
||||
|
||||
|
||||
class AttributeCollection(object):
|
||||
@@ -41,9 +41,10 @@ class AttributeCollection(object):
|
||||
def values(self):
|
||||
return self._attrs.values()
|
||||
|
||||
def __init__(self, attrcls):
|
||||
def __init__(self, attrcls, owner):
|
||||
self._attrcls = attrcls
|
||||
self._attrs = OrderedDict()
|
||||
self.owner = owner
|
||||
|
||||
def add(self, p):
|
||||
p = self._to_attrcls(p)
|
||||
@@ -53,6 +54,8 @@ class AttributeCollection(object):
|
||||
for a, v in p.__dict__.iteritems():
|
||||
if v is not None:
|
||||
setattr(newp, a, v)
|
||||
if not hasattr(newp, "_overridden"):
|
||||
newp._overridden = self.owner # pylint: disable=protected-access
|
||||
self._attrs[p.name] = newp
|
||||
else:
|
||||
# Duplicate attribute condition is check elsewhere.
|
||||
@@ -82,7 +85,12 @@ class AttributeCollection(object):
|
||||
return p
|
||||
|
||||
def __iadd__(self, other):
|
||||
other = [self._to_attrcls(p) for p in other]
|
||||
names = []
|
||||
for p in other:
|
||||
if p.name in names:
|
||||
raise ValueError("Duplicate '{}' {}".format(p.name, p.__class__.__name__.split('.')[-1]))
|
||||
names.append(p.name)
|
||||
self.add(p)
|
||||
return self
|
||||
|
||||
@@ -102,7 +110,7 @@ class AttributeCollection(object):
|
||||
class AliasCollection(AttributeCollection):
|
||||
|
||||
def __init__(self):
|
||||
super(AliasCollection, self).__init__(Alias)
|
||||
super(AliasCollection, self).__init__(Alias, None)
|
||||
|
||||
def _to_attrcls(self, p):
|
||||
if isinstance(p, tuple) or isinstance(p, list):
|
||||
@@ -117,8 +125,9 @@ class AliasCollection(AttributeCollection):
|
||||
|
||||
class ListCollection(list):
|
||||
|
||||
def __init__(self, attrcls): # pylint: disable=unused-argument
|
||||
def __init__(self, attrcls, owner): # pylint: disable=unused-argument
|
||||
super(ListCollection, self).__init__()
|
||||
self.owner = owner
|
||||
|
||||
|
||||
class Param(object):
|
||||
@@ -128,8 +137,14 @@ class Param(object):
|
||||
|
||||
"""
|
||||
|
||||
# Mapping for kind conversion; see docs for convert_types below
|
||||
kind_map = {
|
||||
int: integer,
|
||||
bool: boolean,
|
||||
}
|
||||
|
||||
def __init__(self, name, kind=None, mandatory=None, default=None, override=False,
|
||||
allowed_values=None, description=None, constraint=None, global_alias=None):
|
||||
allowed_values=None, description=None, constraint=None, global_alias=None, convert_types=True):
|
||||
"""
|
||||
Create a new Parameter object.
|
||||
|
||||
@@ -139,9 +154,7 @@ class Param(object):
|
||||
:param kind: The type of parameter this is. This must be a callable that takes an arbitrary
|
||||
object and converts it to the expected type, or raised ``ValueError`` if such
|
||||
conversion is not possible. Most Python standard types -- ``str``, ``int``, ``bool``, etc. --
|
||||
can be used here (though for ``bool``, ``wlauto.utils.misc.as_bool`` is preferred
|
||||
as it intuitively handles strings like ``'false'``). This defaults to ``str`` if
|
||||
not specified.
|
||||
can be used here. This defaults to ``str`` if not specified.
|
||||
:param mandatory: If set to ``True``, then a non-``None`` value for this parameter *must* be
|
||||
provided on extension object construction, otherwise ``ConfigError`` will be
|
||||
raised.
|
||||
@@ -164,10 +177,17 @@ class Param(object):
|
||||
that old extension settings names still work. This should not be used for
|
||||
new parameters.
|
||||
|
||||
:param convert_types: If ``True`` (the default), will automatically convert ``kind`` values from
|
||||
native Python types to WA equivalents. This allows more ituitive interprestation
|
||||
of parameter values, e.g. the string ``"false"`` being interpreted as ``False``
|
||||
when specifed as the value for a boolean Parameter.
|
||||
|
||||
"""
|
||||
self.name = identifier(name)
|
||||
if kind is not None and not callable(kind):
|
||||
raise ValueError('Kind must be callable.')
|
||||
if convert_types and kind in self.kind_map:
|
||||
kind = self.kind_map[kind]
|
||||
self.kind = kind
|
||||
self.mandatory = mandatory
|
||||
self.default = default
|
||||
@@ -204,18 +224,11 @@ class Param(object):
|
||||
else:
|
||||
new_value = current_value + [value]
|
||||
setattr(obj, self.name, new_value)
|
||||
|
||||
def validate(self, obj):
|
||||
value = getattr(obj, self.name, None)
|
||||
if value is not None:
|
||||
if self.allowed_values:
|
||||
self._validate_allowed_values(obj, value)
|
||||
if self.constraint:
|
||||
self._validate_constraint(obj, value)
|
||||
else:
|
||||
if self.mandatory:
|
||||
msg = 'No value specified for mandatory parameter {} in {}.'
|
||||
raise ConfigError(msg.format(self.name, obj.name))
|
||||
|
||||
def get_type_name(self):
|
||||
typename = str(self.kind)
|
||||
@@ -292,7 +305,7 @@ class Artifact(object):
|
||||
network filer archiver may choose to archive them).
|
||||
|
||||
.. note: The kind parameter is intended to represent the logical function of a particular
|
||||
artifact, not it's intended means of processing -- this is left entirely up to the
|
||||
artifact, not its intended means of processing -- this is left entirely up to the
|
||||
result processors.
|
||||
|
||||
"""
|
||||
@@ -381,34 +394,42 @@ class ExtensionMeta(type):
|
||||
('core_modules', str, ListCollection),
|
||||
]
|
||||
|
||||
virtual_methods = ['validate']
|
||||
virtual_methods = ['validate', 'initialize', 'finalize']
|
||||
global_virtuals = ['initialize', 'finalize']
|
||||
|
||||
def __new__(mcs, clsname, bases, attrs):
|
||||
mcs._propagate_attributes(bases, attrs)
|
||||
mcs._propagate_attributes(bases, attrs, clsname)
|
||||
cls = type.__new__(mcs, clsname, bases, attrs)
|
||||
mcs._setup_aliases(cls)
|
||||
mcs._implement_virtual(cls, bases)
|
||||
return cls
|
||||
|
||||
@classmethod
|
||||
def _propagate_attributes(mcs, bases, attrs):
|
||||
def _propagate_attributes(mcs, bases, attrs, clsname):
|
||||
"""
|
||||
For attributes specified by to_propagate, their values will be a union of
|
||||
that specified for cls and it's bases (cls values overriding those of bases
|
||||
that specified for cls and its bases (cls values overriding those of bases
|
||||
in case of conflicts).
|
||||
|
||||
"""
|
||||
for prop_attr, attr_cls, attr_collector_cls in mcs.to_propagate:
|
||||
should_propagate = False
|
||||
propagated = attr_collector_cls(attr_cls)
|
||||
propagated = attr_collector_cls(attr_cls, clsname)
|
||||
for base in bases:
|
||||
if hasattr(base, prop_attr):
|
||||
propagated += getattr(base, prop_attr) or []
|
||||
should_propagate = True
|
||||
if prop_attr in attrs:
|
||||
propagated += attrs[prop_attr] or []
|
||||
pattrs = attrs[prop_attr] or []
|
||||
propagated += pattrs
|
||||
should_propagate = True
|
||||
if should_propagate:
|
||||
for p in propagated:
|
||||
override = bool(getattr(p, "override", None))
|
||||
overridden = bool(getattr(p, "_overridden", None))
|
||||
if override != overridden:
|
||||
msg = "Overriding non existing parameter '{}' inside '{}'"
|
||||
raise ValueError(msg.format(p.name, clsname))
|
||||
attrs[prop_attr] = propagated
|
||||
|
||||
@classmethod
|
||||
@@ -430,13 +451,13 @@ class ExtensionMeta(type):
|
||||
|
||||
super(cls, self).vmname()
|
||||
|
||||
.. note:: current implementation imposes a restriction in that
|
||||
parameters into the function *must* be passed as keyword
|
||||
arguments. There *must not* be positional arguments on
|
||||
virutal method invocation.
|
||||
This also ensures that the methods that have beend identified as
|
||||
"globally virtual" are executed exactly once per WA execution, even if
|
||||
invoked through instances of different subclasses
|
||||
|
||||
"""
|
||||
methods = {}
|
||||
called_globals = set()
|
||||
for vmname in mcs.virtual_methods:
|
||||
clsmethod = getattr(cls, vmname, None)
|
||||
if clsmethod:
|
||||
@@ -444,11 +465,24 @@ class ExtensionMeta(type):
|
||||
methods[vmname] = [bm for bm in basemethods if bm != clsmethod]
|
||||
methods[vmname].append(clsmethod)
|
||||
|
||||
def wrapper(self, __name=vmname, **kwargs):
|
||||
for dm in methods[__name]:
|
||||
dm(self, **kwargs)
|
||||
def generate_method_wrapper(vname): # pylint: disable=unused-argument
|
||||
# this creates a closure with the method name so that it
|
||||
# does not need to be passed to the wrapper as an argument,
|
||||
# leaving the wrapper to accept exactly the same set of
|
||||
# arguments as the method it is wrapping.
|
||||
name__ = vmname # pylint: disable=cell-var-from-loop
|
||||
|
||||
setattr(cls, vmname, wrapper)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
for dm in methods[name__]:
|
||||
if name__ in mcs.global_virtuals:
|
||||
if dm not in called_globals:
|
||||
dm(self, *args, **kwargs)
|
||||
called_globals.add(dm)
|
||||
else:
|
||||
dm(self, *args, **kwargs)
|
||||
return wrapper
|
||||
|
||||
setattr(cls, vmname, generate_method_wrapper(vmname))
|
||||
|
||||
|
||||
class Extension(object):
|
||||
@@ -526,7 +560,15 @@ class Extension(object):
|
||||
if self.name is None:
|
||||
raise ValidationError('Name not set for {}'.format(self._classname))
|
||||
for param in self.parameters:
|
||||
param.validate(self)
|
||||
if param.mandatory and getattr(self, param.name, None) is None:
|
||||
msg = 'No value specified for mandatory parameter {} in {}.'
|
||||
raise ConfigError(msg.format(param.name, self.name))
|
||||
|
||||
def initialize(self, context):
|
||||
pass
|
||||
|
||||
def finalize(self, context):
|
||||
pass
|
||||
|
||||
def check_artifacts(self, context, level):
|
||||
"""
|
||||
@@ -567,28 +609,8 @@ class Extension(object):
|
||||
for module_spec in modules:
|
||||
if not module_spec:
|
||||
continue
|
||||
if isinstance(module_spec, basestring):
|
||||
name = module_spec
|
||||
params = {}
|
||||
elif isinstance(module_spec, dict):
|
||||
if len(module_spec) != 1:
|
||||
message = 'Invalid module spec: {}; dict must have exctly one key -- the module name.'
|
||||
raise ValueError(message.format(module_spec))
|
||||
name, params = module_spec.items()[0]
|
||||
else:
|
||||
message = 'Invalid module spec: {}; must be a string or a one-key dict.'
|
||||
raise ValueError(message.format(module_spec))
|
||||
|
||||
if not isinstance(params, dict):
|
||||
message = 'Invalid module spec: {}; dict value must also be a dict.'
|
||||
raise ValueError(message.format(module_spec))
|
||||
|
||||
module = loader.get_module(name, owner=self, **params)
|
||||
module.initialize()
|
||||
for capability in module.capabilities:
|
||||
if capability not in self.capabilities:
|
||||
self.capabilities.append(capability)
|
||||
self._modules.append(module)
|
||||
module = self._load_module(loader, module_spec)
|
||||
self._install_module(module)
|
||||
|
||||
def has(self, capability):
|
||||
"""Check if this extension has the specified capability. The alternative method ``can`` is
|
||||
@@ -598,6 +620,33 @@ class Extension(object):
|
||||
|
||||
can = has
|
||||
|
||||
def _load_module(self, loader, module_spec):
|
||||
if isinstance(module_spec, basestring):
|
||||
name = module_spec
|
||||
params = {}
|
||||
elif isinstance(module_spec, dict):
|
||||
if len(module_spec) != 1:
|
||||
message = 'Invalid module spec: {}; dict must have exctly one key -- the module name.'
|
||||
raise ValueError(message.format(module_spec))
|
||||
name, params = module_spec.items()[0]
|
||||
else:
|
||||
message = 'Invalid module spec: {}; must be a string or a one-key dict.'
|
||||
raise ValueError(message.format(module_spec))
|
||||
|
||||
if not isinstance(params, dict):
|
||||
message = 'Invalid module spec: {}; dict value must also be a dict.'
|
||||
raise ValueError(message.format(module_spec))
|
||||
|
||||
module = loader.get_module(name, owner=self, **params)
|
||||
module.initialize(None)
|
||||
return module
|
||||
|
||||
def _install_module(self, module):
|
||||
for capability in module.capabilities:
|
||||
if capability not in self.capabilities:
|
||||
self.capabilities.append(capability)
|
||||
self._modules.append(module)
|
||||
|
||||
def __check_from_loader(self):
|
||||
"""
|
||||
There are a few things that need to happen in order to get a valide extension instance.
|
||||
@@ -627,7 +676,7 @@ class Module(Extension):
|
||||
In other words, a Module is roughly equivalent to a kernel module and its primary purpose is to
|
||||
implement WA "drivers" for various peripherals that may or may not be present in a particular setup.
|
||||
|
||||
.. note:: A mudule is itself an Extension and can therefore have it's own modules.
|
||||
.. note:: A mudule is itself an Extension and can therefore have its own modules.
|
||||
|
||||
"""
|
||||
|
||||
@@ -647,6 +696,5 @@ class Module(Extension):
|
||||
if owner.name == self.name:
|
||||
raise ValueError('Circular module import for {}'.format(self.name))
|
||||
|
||||
def initialize(self):
|
||||
def initialize(self, context):
|
||||
pass
|
||||
|
||||
|
@@ -80,8 +80,8 @@ class GlobalParameterAlias(object):
|
||||
other_param.kind != param.kind):
|
||||
message = 'Duplicate global alias {} declared in {} and {} extensions with different types'
|
||||
raise LoaderError(message.format(self.name, ext.name, other_ext.name))
|
||||
if not param.name == other_param.name:
|
||||
message = 'Two params {} in {} and {} in {} both declare global alias {}'
|
||||
if param.kind != other_param.kind:
|
||||
message = 'Two params {} in {} and {} in {} both declare global alias {}, and are of different kinds'
|
||||
raise LoaderError(message.format(param.name, ext.name,
|
||||
other_param.name, other_ext.name, self.name))
|
||||
|
||||
@@ -304,14 +304,14 @@ class ExtensionLoader(object):
|
||||
for module in walk_modules(package):
|
||||
self._load_module(module)
|
||||
except ImportError as e:
|
||||
message = 'Problem loading extensions from extra packages: {}'
|
||||
raise LoaderError(message.format(e.message))
|
||||
message = 'Problem loading extensions from package {}: {}'
|
||||
raise LoaderError(message.format(package, e.message))
|
||||
|
||||
def _load_from_paths(self, paths, ignore_paths):
|
||||
self.logger.debug('Loading from paths.')
|
||||
for path in paths:
|
||||
self.logger.debug('Checking path %s', path)
|
||||
for root, _, files in os.walk(path):
|
||||
for root, _, files in os.walk(path, followlinks=True):
|
||||
should_skip = False
|
||||
for igpath in ignore_paths:
|
||||
if root.startswith(igpath):
|
||||
@@ -320,7 +320,7 @@ class ExtensionLoader(object):
|
||||
if should_skip:
|
||||
continue
|
||||
for fname in files:
|
||||
if not os.path.splitext(fname)[1].lower() == '.py':
|
||||
if os.path.splitext(fname)[1].lower() != '.py':
|
||||
continue
|
||||
filepath = os.path.join(root, fname)
|
||||
try:
|
||||
@@ -333,6 +333,9 @@ class ExtensionLoader(object):
|
||||
self.logger.warn('Got: {}'.format(e))
|
||||
else:
|
||||
raise LoaderError('Failed to load {}'.format(filepath), sys.exc_info())
|
||||
except Exception as e:
|
||||
message = 'Problem loading extensions from {}: {}'
|
||||
raise LoaderError(message.format(filepath, e))
|
||||
|
||||
def _load_module(self, module): # NOQA pylint: disable=too-many-branches
|
||||
self.logger.debug('Checking module %s', module.__name__)
|
||||
@@ -371,9 +374,10 @@ class ExtensionLoader(object):
|
||||
store = self._get_store(ext)
|
||||
store[key] = obj
|
||||
for alias in obj.aliases:
|
||||
if alias in self.extensions or alias in self.aliases:
|
||||
alias_id = identifier(alias.name)
|
||||
if alias_id in self.extensions or alias_id in self.aliases:
|
||||
raise LoaderError('{} {} already exists.'.format(ext.name, obj.name))
|
||||
self.aliases[alias.name] = alias
|
||||
self.aliases[alias_id] = alias
|
||||
|
||||
# Update global aliases list. If a global alias is already in the list,
|
||||
# then make sure this extension is in the same parent/child hierarchy
|
||||
@@ -397,4 +401,3 @@ def _instantiate(cls, args=None, kwargs=None):
|
||||
return cls(*args, **kwargs)
|
||||
except Exception:
|
||||
raise LoaderError('Could not load {}'.format(cls), sys.exc_info())
|
||||
|
||||
|
@@ -32,4 +32,3 @@ def get_extension_type(ext):
|
||||
if isinstance(ext, cls):
|
||||
return name
|
||||
raise ValueError('Unknown extension type: {}'.format(ext.__class__.__name__))
|
||||
|
||||
|
@@ -106,6 +106,7 @@ import wlauto.core.signal as signal
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.exceptions import WAError, DeviceNotRespondingError, TimeoutError
|
||||
from wlauto.utils.misc import get_traceback, isiterable
|
||||
from wlauto.utils.types import identifier
|
||||
|
||||
|
||||
logger = logging.getLogger('instrumentation')
|
||||
@@ -191,11 +192,23 @@ def is_installed(instrument):
|
||||
if instrument in [i.__class__ for i in installed]:
|
||||
return True
|
||||
else: # assume string
|
||||
if instrument in [i.name for i in installed]:
|
||||
if identifier(instrument) in [identifier(i.name) for i in installed]:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def is_enabled(instrument):
|
||||
if isinstance(instrument, Instrument) or isinstance(instrument, type):
|
||||
name = instrument.name
|
||||
else: # assume string
|
||||
name = instrument
|
||||
try:
|
||||
installed_instrument = get_instrument(name)
|
||||
return installed_instrument.is_enabled
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
failures_detected = False
|
||||
|
||||
|
||||
@@ -228,7 +241,7 @@ class ManagedCallback(object):
|
||||
except (KeyboardInterrupt, DeviceNotRespondingError, TimeoutError): # pylint: disable=W0703
|
||||
raise
|
||||
except Exception as e: # pylint: disable=W0703
|
||||
logger.error('Error in insturment {}'.format(self.instrument.name))
|
||||
logger.error('Error in instrument {}'.format(self.instrument.name))
|
||||
global failures_detected # pylint: disable=W0603
|
||||
failures_detected = True
|
||||
if isinstance(e, WAError):
|
||||
@@ -275,9 +288,15 @@ def install(instrument):
|
||||
attr = getattr(instrument, attr_name)
|
||||
if not callable(attr):
|
||||
raise ValueError('Attribute {} not callable in {}.'.format(attr_name, instrument))
|
||||
arg_num = len(inspect.getargspec(attr).args)
|
||||
if not arg_num == 2:
|
||||
raise ValueError('{} must take exactly 2 arguments; {} given.'.format(attr_name, arg_num))
|
||||
argspec = inspect.getargspec(attr)
|
||||
arg_num = len(argspec.args)
|
||||
# Instrument callbacks will be passed exactly two arguments: self
|
||||
# (the instrument instance to which the callback is bound) and
|
||||
# context. However, we also allow callbacks to capture the context
|
||||
# in variable arguments (declared as "*args" in the definition).
|
||||
if arg_num > 2 or (arg_num < 2 and argspec.varargs is None):
|
||||
message = '{} must take exactly 2 positional arguments; {} given.'
|
||||
raise ValueError(message.format(attr_name, arg_num))
|
||||
|
||||
logger.debug('\tConnecting %s to %s', attr.__name__, SIGNAL_MAP[stripped_attr_name])
|
||||
mc = ManagedCallback(instrument, attr)
|
||||
@@ -300,7 +319,7 @@ def get_instrument(inst):
|
||||
if isinstance(inst, Instrument):
|
||||
return inst
|
||||
for installed_inst in installed:
|
||||
if installed_inst.name == inst:
|
||||
if identifier(installed_inst.name) == identifier(inst):
|
||||
return installed_inst
|
||||
raise ValueError('Instrument {} is not installed'.format(inst))
|
||||
|
||||
@@ -366,9 +385,14 @@ class Instrument(Extension):
|
||||
self.is_enabled = True
|
||||
self.is_broken = False
|
||||
|
||||
def initialize(self, context):
|
||||
pass
|
||||
|
||||
def finalize(self, context):
|
||||
pass
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def __repr__(self):
|
||||
return 'Instrument({})'.format(self.name)
|
||||
|
||||
|
@@ -65,10 +65,15 @@ class ResourceResolver(object):
|
||||
self.logger.debug('Trying {}'.format(getter))
|
||||
result = getter.get(resource, *args, **kwargs)
|
||||
if result is not None:
|
||||
self.logger.debug('Resource {} found using {}'.format(resource, getter))
|
||||
self.logger.debug('Resource {} found using {}:'.format(resource, getter))
|
||||
self.logger.debug('\t{}'.format(result))
|
||||
return result
|
||||
if strict:
|
||||
raise ResourceError('{} could not be found'.format(resource))
|
||||
if kwargs:
|
||||
criteria = ', '.join(['{}:{}'.format(k, v) for k, v in kwargs.iteritems()])
|
||||
raise ResourceError('{} ({}) could not be found'.format(resource, criteria))
|
||||
else:
|
||||
raise ResourceError('{} could not be found'.format(resource))
|
||||
self.logger.debug('Resource {} not found.'.format(resource))
|
||||
return None
|
||||
|
||||
|
@@ -13,6 +13,7 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
from wlauto.core.bootstrap import settings
|
||||
from wlauto.core.extension import Extension
|
||||
|
||||
|
||||
@@ -37,10 +38,10 @@ class GetterPriority(object):
|
||||
"""
|
||||
cached = 20
|
||||
preferred = 10
|
||||
remote = 5
|
||||
environment = 0
|
||||
external_package = -5
|
||||
package = -10
|
||||
remote = -20
|
||||
|
||||
|
||||
class Resource(object):
|
||||
@@ -81,7 +82,7 @@ class ResourceGetter(Extension):
|
||||
Base class for implementing resolvers. Defines resolver interface. Resolvers are
|
||||
responsible for discovering resources (such as particular kinds of files) they know
|
||||
about based on the parameters that are passed to them. Each resolver also has a dict of
|
||||
attributes that describe it's operation, and may be used to determine which get invoked.
|
||||
attributes that describe its operation, and may be used to determine which get invoked.
|
||||
There is no pre-defined set of attributes and resolvers may define their own.
|
||||
|
||||
Class attributes:
|
||||
@@ -169,6 +170,7 @@ class __NullOwner(object):
|
||||
"""Represents an owner for a resource not owned by anyone."""
|
||||
|
||||
name = 'noone'
|
||||
dependencies_directory = settings.dependencies_directory
|
||||
|
||||
def __getattr__(self, name):
|
||||
return None
|
||||
|
@@ -44,7 +44,7 @@ from datetime import datetime
|
||||
from wlauto.core.extension import Extension
|
||||
from wlauto.exceptions import WAError
|
||||
from wlauto.utils.types import numeric
|
||||
from wlauto.utils.misc import enum_metaclass
|
||||
from wlauto.utils.misc import enum_metaclass, merge_dicts
|
||||
|
||||
|
||||
class ResultManager(object):
|
||||
@@ -191,12 +191,13 @@ class RunResult(object):
|
||||
else:
|
||||
return self.UNKNOWN # should never happen
|
||||
|
||||
def __init__(self, run_info):
|
||||
def __init__(self, run_info, output_directory=None):
|
||||
self.info = run_info
|
||||
self.iteration_results = []
|
||||
self.artifacts = []
|
||||
self.events = []
|
||||
self.non_iteration_errors = False
|
||||
self.output_directory = output_directory
|
||||
|
||||
|
||||
class RunEvent(object):
|
||||
@@ -253,14 +254,18 @@ class IterationResult(object):
|
||||
self.spec = spec
|
||||
self.id = spec.id
|
||||
self.workload = spec.workload
|
||||
self.classifiers = copy(spec.classifiers)
|
||||
self.iteration = None
|
||||
self.status = self.NOT_STARTED
|
||||
self.output_directory = None
|
||||
self.events = []
|
||||
self.metrics = []
|
||||
self.artifacts = []
|
||||
|
||||
def add_metric(self, name, value, units=None, lower_is_better=False):
|
||||
self.metrics.append(Metric(name, value, units, lower_is_better))
|
||||
def add_metric(self, name, value, units=None, lower_is_better=False, classifiers=None):
|
||||
classifiers = merge_dicts(self.classifiers, classifiers or {},
|
||||
list_duplicates='last', should_normalize=False)
|
||||
self.metrics.append(Metric(name, value, units, lower_is_better, classifiers))
|
||||
|
||||
def has_metric(self, name):
|
||||
for metric in self.metrics:
|
||||
@@ -298,14 +303,18 @@ class Metric(object):
|
||||
has no units (e.g. it's a count or a standardised score).
|
||||
:param lower_is_better: Boolean flag indicating where lower values are
|
||||
better than higher ones. Defaults to False.
|
||||
:param classifiers: A set of key-value pairs to further classify this metric
|
||||
beyond current iteration (e.g. this can be used to identify
|
||||
sub-tests).
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, name, value, units=None, lower_is_better=False):
|
||||
def __init__(self, name, value, units=None, lower_is_better=False, classifiers=None):
|
||||
self.name = name
|
||||
self.value = numeric(value)
|
||||
self.units = units
|
||||
self.lower_is_better = lower_is_better
|
||||
self.classifiers = classifiers or {}
|
||||
|
||||
def to_dict(self):
|
||||
return self.__dict__
|
||||
@@ -318,4 +327,3 @@ class Metric(object):
|
||||
return '<{}>'.format(result)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
|
@@ -186,4 +186,3 @@ def send(signal, sender, *args, **kwargs):
|
||||
|
||||
"""
|
||||
dispatcher.send(signal, sender, *args, **kwargs)
|
||||
|
||||
|
@@ -18,7 +18,7 @@ from collections import namedtuple
|
||||
|
||||
VersionTuple = namedtuple('Version', ['major', 'minor', 'revision'])
|
||||
|
||||
version = VersionTuple(2, 3, 0)
|
||||
version = VersionTuple(2, 6, 0)
|
||||
|
||||
|
||||
def get_wa_version():
|
||||
|
@@ -37,6 +37,7 @@ class Workload(Extension):
|
||||
supported_devices = []
|
||||
supported_platforms = []
|
||||
summary_metrics = []
|
||||
requires_network = False
|
||||
|
||||
def __init__(self, device, **kwargs):
|
||||
"""
|
||||
@@ -53,25 +54,33 @@ class Workload(Extension):
|
||||
|
||||
def init_resources(self, context):
|
||||
"""
|
||||
May be optionally overridden by concrete instances in order to discover and initialise
|
||||
necessary resources. This method will be invoked at most once during the execution:
|
||||
before running any workloads, and before invocation of ``validate()``, but after it is
|
||||
clear that this workload will run (i.e. this method will not be invoked for workloads
|
||||
that have been discovered but have not been scheduled run in the agenda).
|
||||
This method may be used to perform early resource discovery and initialization. This is invoked
|
||||
during the initial loading stage and before the device is ready, so cannot be used for any
|
||||
device-dependent initialization. This method is invoked before the workload instance is
|
||||
validated.
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
def setup(self, context):
|
||||
def initialize(self, context):
|
||||
"""
|
||||
Perform the setup necessary to run the workload, such as copying the necessry files
|
||||
This method should be used to perform once-per-run initialization of a workload instance, i.e.,
|
||||
unlike ``setup()`` it will not be invoked on each iteration.
|
||||
|
||||
"""
|
||||
pass
|
||||
|
||||
def setup(self, context): # pylint: disable=unused-argument
|
||||
"""
|
||||
Perform the setup necessary to run the workload, such as copying the necessary files
|
||||
to the device, configuring the environments, etc.
|
||||
|
||||
This is also the place to perform any on-device checks prior to attempting to execute
|
||||
the workload.
|
||||
|
||||
"""
|
||||
pass
|
||||
if self.requires_network:
|
||||
self.check_network_connected()
|
||||
|
||||
def run(self, context):
|
||||
"""Execute the workload. This is the method that performs the actual "work" of the"""
|
||||
@@ -89,6 +98,13 @@ class Workload(Extension):
|
||||
""" Perform any final clean up for the Workload. """
|
||||
pass
|
||||
|
||||
def finalize(self, context):
|
||||
pass
|
||||
|
||||
def check_network_connected(self):
|
||||
if not self.device.is_network_connected():
|
||||
message = 'Workload "{}" requires internet. Device "{}" does not appear to be connected to the internet.'
|
||||
raise WorkloadError(message.format(self.name, self.device.name))
|
||||
|
||||
def __str__(self):
|
||||
return '<Workload {}>'.format(self.name)
|
||||
|
||||
|
@@ -12,5 +12,3 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
|
@@ -12,5 +12,3 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user