Mirror of https://github.com/genodelabs/genode.git (synced 2024-12-21 06:33:31 +00:00)
depot_autopilot: consider log_prefix attribute
The new 'log_prefix' attribute is effective when used in a test's runtime in <succeed> or <fail> tags that have a non-empty content string. When matching the log against the pattern given in the affected <succeed> or <fail> tag, the Depot Autopilot will consider only those test-log lines that start with the given prefix.

Ref #4922
Parent: c47a6b0830
Commit: 987dea5f7f
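For illustration, a minimal sketch of how the new attribute could appear in a test package's runtime file. The component name 'test-xyz', the pattern strings, and the resource values are hypothetical and not taken from this commit; the overall structure follows the usual depot_autopilot runtime conventions described in its README:

  <runtime ram="32M" caps="1000" binary="init">

    <fail after_seconds="30"/>
    <fail    log_prefix="[init -> test-xyz]">Error: </fail>
    <succeed log_prefix="[init -> test-xyz]">test-xyz finished</succeed>

    <content>
      <rom label="test-xyz"/>
    </content>
  </runtime>

Note that the attribute applies only to <succeed> and <fail> tags with a non-empty content string; a bare timeout tag like <fail after_seconds="30"/> is unaffected.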
@@ -225,6 +225,13 @@ Besides the mandatory package content, a test package is expected to provide a
 characters '<', '&', '*' in the pattern must be escaped as "&lt;", "&amp;",
 "&#42;". A character '*' in the pattern is treated as non-greedy wildcard.
 
+:<fail log_prefix="[init -> test]">Error!</fail>:
+:<succeed log_prefix="[init -> test]">Done!</succeed>:
+
+When matching the log against the pattern given in the <succeed> or
+<fail> tag, the Depot Autopilot will consider only those test-log lines that
+start with the given prefix.
+
 :<content>:
 
 Lists required files from the test-package build besides the root-component
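To make the added paragraph concrete with hypothetical log lines: given log_prefix="[init -> test]", a line such as "[init -> test] Done!" is considered when matching the pattern, whereas output of other components, for example "[init -> timer] ...", is ignored for this particular <succeed> or <fail> tag.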
@@ -78,14 +78,15 @@ static Filter const *filter_to_apply(FILTERS const &filters,
 static size_t sanitize_pattern(char *const base,
                                size_t      size)
 {
-	static Filters<5> pattern_filters
+	static Filters<6> pattern_filters
 	{
 		{
 			{ "\x9",    ""   },
 			{ "\xa",    ""   },
 			{ "&lt;",   "<"  },
 			{ "&amp;",  "&"  },
-			{ "&#42;",  "*"  }
+			{ "&#42;",  "*"  },
+			{ "&quot;", "\"" }
 		}
 	};
 	struct Bad_filter : Exception { };
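As a usage note on the escape filters above (the example text is hypothetical): a pattern that should match a literal '*' in the log must spell it as "&#42;", e.g. :<succeed>throughput: 3&#42;1024 KiB/s</succeed>:, because an unescaped '*' acts as a non-greedy wildcard. The newly added "&quot;" filter likewise yields a literal '"', which is presumably the motivation for this commit, since a raw double quote cannot appear inside the quoted log_prefix attribute value.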
@@ -984,7 +985,11 @@ bool Log_event::handle_log_update(Expanding_string const &log_str)
 {
 	while (true) {
 
-		/* determine current pattern chunk */
+		/*
+		 * Determine the log pattern chunk that covers the point defined
+		 * by the current value of _pattern_offset. I.e., the first chunk of
+		 * the pattern that could not be fully matched against the log yet.
+		 */
 		Plain_string const *pattern_chunk { nullptr };
 		size_t pattern_chunk_offset { _pattern_offset };
 		_plain_strings.for_each([&] (Plain_string const &chunk) {
@@ -997,13 +1002,21 @@ bool Log_event::handle_log_update(Expanding_string const &log_str)
 				pattern_chunk = &chunk;
 			}
 		});
+		/*
+		 * If there is nothing left to match, stop and return that the event
+		 * was just triggered.
+		 */
 		if (!pattern_chunk) {
 			return true;
 		}
+		/* get the range of yet unmatched bytes inside the pattern chunk */
 		char const *pattern_chunk_curr { pattern_chunk->base() + pattern_chunk_offset };
 		size_t const pattern_chunk_left { pattern_chunk->size() - pattern_chunk_offset };
 
-		/* determine current log chunk */
+		/*
+		 * Determine the buffered log chunk that covers the point
+		 * defined by the current value of _log_offset.
+		 */
 		Expanding_string::Chunk const *log_chunk { nullptr };
 		size_t log_chunk_offset { _log_offset };
 		log_str.for_each_chunk([&] (Expanding_string::Chunk const &chunk) {
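A small worked example for the chunk lookup described in the comments above (numbers are hypothetical): if the pattern consists of plain-string chunks of sizes 5 and 8 and _pattern_offset is 7, the lookup presumably settles on the second chunk with an in-chunk offset of 2, so pattern_chunk_curr points 2 bytes into that chunk and pattern_chunk_left is 6. The same scheme applies to the buffered log chunks and _log_offset.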
@@ -1016,19 +1029,53 @@ bool Log_event::handle_log_update(Expanding_string const &log_str)
 				log_chunk = &chunk;
 			}
 		});
+		/*
+		 * If there is no log left to process, stop and return that the event
+		 * was not yet triggered.
+		 */
 		if (!log_chunk) {
 			return false;
 		}
+		if (_log_prefix_valid) {
+
+			/*
+			 * If the log chunk doesn't start with the log prefix configured
+			 * for this event, completely ignore the chunk.
+			 */
+			if (memcmp(log_chunk->base(), _log_prefix.string(), _log_prefix.length() - 1)) {
+
+				_log_offset += log_chunk->size();
+				continue;
+			}
+		}
+		/* get the range of yet unprocessed bytes inside the log chunk */
 		char const *log_chunk_curr { log_chunk->base() + log_chunk_offset };
 		size_t const log_chunk_left { log_chunk->size() - log_chunk_offset };
 
-		/* compare log with pattern */
+		/*
+		 * Compare the yet unmatched pattern bytes to the yet unprocessed log
+		 * bytes and advance the offsets accordingly.
+		 */
 		size_t const cmp_size { min(log_chunk_left, pattern_chunk_left) };
 		if (memcmp(pattern_chunk_curr, log_chunk_curr, cmp_size)) {
+
+			/*
+			 * If the offset into the pattern chunk is > 0, this means that
+			 * the chunk could be matched partially against the less advanced
+			 * log buffer during the last update. If the remaining bytes now
+			 * fail to match against the just arrived subsequent log bytes,
+			 * we must discard the partial match and try to match the whole
+			 * chunk again. Note that it is correct to then increase the log
+			 * offset in any case because continuing with the partial match
+			 * would not have failed if (_log_offset - pattern_chunk_offset)
+			 * would point to a match for the whole pattern chunk.
+			 */
 			_pattern_offset -= pattern_chunk_offset;
 			_log_offset -= pattern_chunk_offset;
 			_log_offset += 1;
+
 		} else {
+
 			_pattern_offset += cmp_size;
 			_log_offset += cmp_size;
 		}
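A brief illustration of the backtracking explained in the long comment (byte values are hypothetical): assume the current pattern chunk is "abcd" and the log received so far ends with "ab". Those two bytes match, so _pattern_offset advances by 2. If the next log update delivers "xy" instead of "cd", the memcmp fails; the code then rewinds _pattern_offset to the start of the chunk, rewinds _log_offset by the same 2 bytes, and advances it by one, so the whole chunk "abcd" is retried beginning one byte after the position where the partial match had started.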
@@ -1064,11 +1111,31 @@ Log_event::~Log_event()
 }
 
 
+Log_prefix Log_event::_init_log_prefix(Xml_node const &xml)
+{
+	if (!xml.has_attribute("log_prefix"))
+		return Log_prefix { };
+
+	char buf[Log_prefix::size()];
+	size_t buf_str_size { 0 };
+	xml.attribute("log_prefix").with_raw_value([&] (char const *src_ptr, size_t src_size) {
+
+		size_t const cpy_size = min(src_size, sizeof(buf) - 1);
+		memcpy(buf, src_ptr, cpy_size);
+		buf[cpy_size] = 0;
+		buf_str_size = sanitize_pattern(buf, cpy_size + 1);
+	});
+	return Cstring { buf, buf_str_size };
+}
+
+
 Log_event::Log_event(Allocator &alloc,
                      Xml_node const &xml)
 :
-	Event { xml, Type::LOG },
-	_alloc { alloc }
+	Event { xml, Type::LOG },
+	_alloc { alloc },
+	_log_prefix { _init_log_prefix(xml) },
+	_log_prefix_valid { _log_prefix != Log_prefix { } }
 {
 	char const *const base { xml_content_base(xml) };
 	size_t const size { xml_content_size(xml) };
@@ -45,6 +45,8 @@ namespace Depot_deploy {
 		void print(Genode::Output &output) const;
 	};
 
+	using Log_prefix = String<256>;
+
 	class Child;
 	class Event;
 	class Timeout_event;
@@ -169,17 +171,32 @@ class Depot_deploy::Log_event : public Event,
 			}
 		};
 
-		Genode::Allocator &_alloc;
-		size_t _log_offset { 0 };
-		size_t _pattern_offset { 0 };
-		Genode::Fifo<Plain_string> _plain_strings { };
+		Genode::Allocator &_alloc;
 
 		void _replace_wildcards_with_0();
+		/*
+		 * Defines a point inside the concatenation of all chunks of the
+		 * buffered log. Up to that point the buffered log has been processed
+		 * by this log event already.
+		 */
+		size_t _log_offset { 0 };
+
+		/*
+		 * Defines a point inside the concatenation of all chunks of the log
+		 * pattern of this event. Up to that point the pattern could be
+		 * successfully matched against the log so far.
+		 */
+		size_t _pattern_offset { 0 };
+
+		Genode::Fifo<Plain_string> _plain_strings { };
+		Log_prefix const _log_prefix;
+		bool const _log_prefix_valid;
 
 		Log_event(Log_event const &);
 
 		Log_event const & operator=(const Log_event&);
+
+		Log_prefix _init_log_prefix(Xml_node const &xml);
 
 	public:
 
 		Log_event(Allocator &alloc,