mirror of https://github.com/zerotier/ZeroTierOne.git (synced 2025-01-30 08:04:04 +00:00)

commit 055b99c3cb ("cleanup"), parent 531842dc6b
@@ -1,58 +0,0 @@
// Earlier timing harness, kept for reference but disabled:
/*
#include <signal.h>
#include <ctime>
#include <chrono>
#include <rethinkdb.h>

namespace R = RethinkDB;
std::unique_ptr<R::Connection> conn;

int main() {
    signal(SIGPIPE, SIG_IGN);
    try {
        conn = R::connect();
    } catch(const R::Error& error) {
        printf("FAILURE: could not connect to localhost:28015: %s\n", error.message.c_str());
        return 1;
    }

    try {
        printf("running test...\n");
        auto start = std::chrono::steady_clock::now();
        R::Datum d = R::range(1, 1000000)
            .map([]() { return R::object("test", "hello", "data", "world"); })
            .run(*conn);
        auto end = std::chrono::steady_clock::now();
        auto diff = end - start;

        printf("result size: %d\n", (int)d.get_array()->size());
        printf("completed in %f ms\n", std::chrono::duration<double, std::milli>(diff).count());
    } catch (const R::Error& error) {
        printf("FAILURE: uncaught exception: %s\n", error.message.c_str());
        return 1;
    }
}
*/

// Active smoke test: connect to a local server and list its databases.
#include <iostream>
#include <rethinkdb.h>

namespace R = RethinkDB;

int main() {
    auto conn = R::connect();
    if (!conn) {
        std::cerr << "Could not connect to server\n";
        return 1;
    }

    std::cout << "Connected" << std::endl;
    R::Cursor databases = R::db_list().run(*conn);
    for (R::Datum const& db : databases) {
        std::cout << *db.get_string() << '\n';
    }

    return 0;
}

@@ -1,11 +0,0 @@
# Generate a translation unit that declares and calls each upstream test,
# resetting state with clean_slate() before every one.
from sys import argv
from re import sub

print("#include \"testlib.h\"")
print("void run_upstream_tests() {")
for path in argv[1:]:
    name = sub('/', '_', path.split('.')[0])
    print(" extern void %s();" % name)
    print(" clean_slate();")
    print(" %s();" % name)
print("}")
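
// Illustrative only (an assumption, not taken from the real test list): for a
// hypothetical input path "upstream/math_logic/add.yaml.cc", the generator
// above would print a runner along these lines:
#include "testlib.h"
void run_upstream_tests() {
 extern void upstream_math_logic_add();
 clean_slate();
 upstream_math_logic_add();
}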
@@ -1,114 +0,0 @@
#include <signal.h>

#include <ctime>

#include "testlib.h"

extern void run_upstream_tests();

void test_json(const char* string, const char* ret = "") {
    TEST_EQ(R::Datum::from_json(string).as_json().c_str(), ret[0] ? ret : string);
}

void test_json_parse_print() {
    enter_section("json");
    test_json("-0.0", "-0.0");
    test_json("null");
    test_json("1.2");
    test_json("1.2e20", "1.2e+20");
    test_json("true");
    test_json("false");
    test_json("\"\"");
    test_json("\"\\u1234\"", "\"\u1234\"");
    test_json("\"\\\"\"");
    test_json("\"foobar\"");
    test_json("[]");
    test_json("[1]");
    test_json("[1,2,3,4]");
    test_json("{}");
    test_json("{\"a\":1}");
    test_json("{\"a\":1,\"b\":2,\"c\":3}");
    exit_section();
}

void test_reql() {
    enter_section("reql");
    TEST_EQ((R::expr(1) + 2).run(*conn), R::Datum(3));
    TEST_EQ(R::range(4).count().run(*conn), R::Datum(4));
    TEST_EQ(R::js("Math.abs")(-1).run(*conn), 1);
    exit_section();
}

void test_cursor() {
    enter_section("cursor");
    R::Cursor cursor = R::range(10000).run(*conn);
    TEST_EQ(cursor.next(), 0);
    R::Array array = cursor.to_array();
    TEST_EQ(array.size(), 9999);
    TEST_EQ(*array.begin(), 1);
    TEST_EQ(*array.rbegin(), 9999);
    int i = 0;
    R::range(3).run(*conn).each([&i](R::Datum&& datum){
        TEST_EQ(datum, i++); });
    exit_section();
}

void test_encode(const char* str, const char* b) {
    TEST_EQ(R::base64_encode(str), b);
}

void test_decode(const char* b, const char* str) {
    std::string out;
    TEST_EQ(R::base64_decode(b, out), true);
    TEST_EQ(out, str);
}

#define TEST_B64(a, b) test_encode(a, b); test_decode(b, a)

void test_binary() {
    enter_section("base64");
    TEST_B64("", "");
    TEST_B64("foo", "Zm9v");
    exit_section();
}

void test_issue28() {
    enter_section("issue #28");
    std::vector<std::string> expected{ "rethinkdb", "test" };
    std::vector<std::string> dbs;
    R::Cursor databases = R::db_list().run(*conn);
    for (R::Datum const& db : databases) {
        dbs.push_back(*db.get_string());
    }

    TEST_EQ(dbs, expected);
    exit_section();
}

int main() {
    signal(SIGPIPE, SIG_IGN);
    srand(time(NULL));
    try {
        conn = R::connect();
    } catch(const R::Error& error) {
        printf("FAILURE: could not connect to localhost:28015: %s\n", error.message.c_str());
        return 1;
    }
    try {
        //test_binary();
        //test_json_parse_print();
        //test_reql();
        //test_cursor();
        test_issue28();
        run_upstream_tests();
    } catch (const R::Error& error) {
        printf("FAILURE: uncaught exception: %s\n", error.message.c_str());
        return 1;
    }
    if (!failed) {
        printf("SUCCESS: %d tests passed\n", count);
    } else {
        printf("DONE: %d of %d tests failed\n", failed, count);
        return 1;
    }
}

@@ -1,356 +0,0 @@
#include <algorithm>
#include <regex>
#include <thread>
#include <chrono>
#include <unistd.h>

#include "testlib.h"

int verbosity = 0;

int failed = 0;
int count = 0;
std::vector<std::pair<const char*, bool>> section;

std::unique_ptr<R::Connection> conn;

// std::string to_string(const R::Cursor&) {
//     return "<Cursor>";
// }

std::string to_string(const R::Term& query) {
    return to_string(query.get_datum());
}

std::string to_string(const R::Datum& datum) {
    return datum.as_json();
}

std::string to_string(const R::Object& object) {
    auto it = object.find("special");
    if (it != object.end()) {
        std::string type = *(it->second).get_string();
        auto bag = object.find(type);
        if (bag != object.end()) {
            return to_string((R::Datum)bag->second);
        }
    }

    return to_string((R::Datum)object);
}

std::string to_string(const R::Error& error) {
    return "Error(\"" + error.message + "\")";
}

void enter_section(const char* name) {
    if (verbosity == 0) {
        section.emplace_back(name, true);
    } else {
        printf("%sSection %s\n", indent(), name);
        section.emplace_back(name, false);
    }
}

void section_cleanup() {
    R::db("test").table_list().for_each([=](R::Var table) {
        return R::db("test").table_drop(*table);
    }).run(*conn);
}

void exit_section() {
    section.pop_back();
}

std::string to_string(const err& error) {
    return "Error(\"" + error.convert_type() + ": " + error.message + "\")";
}

bool equal(const R::Error& a, const err& b) {
    // @TODO: I think the proper solution to this problem is in fact to create
    // a hierarchy of exception types. This would not only simplify these
    // error cases, but could be of great use to the user.
    std::string error_type = b.convert_type();
    if (error_type == "ReqlServerCompileError" &&
        a.message.find("ReqlCompileError") != std::string::npos) {
        return true;
    }

    return b.trim_message(a.message) == (error_type + ": " + b.message);
}
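
// Illustrative sketch only (an assumption, not part of the driver or this test
// suite): one possible shape for the exception hierarchy suggested by the
// @TODO above. The class names are invented for illustration; the real
// R::Error only carries a message string. With types like these, equal()
// could classify errors by type (catch or dynamic_cast) instead of
// substring-matching Error::message.
#include <stdexcept>

struct ReqlErrorSketch : std::runtime_error {
    using std::runtime_error::runtime_error;
};
struct ReqlCompileErrorSketch : ReqlErrorSketch {
    using ReqlErrorSketch::ReqlErrorSketch;
};
struct ReqlServerCompileErrorSketch : ReqlCompileErrorSketch {
    using ReqlCompileErrorSketch::ReqlCompileErrorSketch;
};
struct ReqlRuntimeErrorSketch : ReqlErrorSketch {
    using ReqlErrorSketch::ReqlErrorSketch;
};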

bool match(const char* pattern, const char* string) {
    return std::regex_match(string, std::regex(pattern));
}

bool equal(const R::Error& a, const err_regex& b) {
    if (b.message == "Object keys must be strings" &&
        a.message == "runtime error: Expected type STRING but found NUMBER.") {
        return true;
    }
    return match(b.regex().c_str(), a.message.c_str());
}

std::string to_string(const err_regex& error) {
    return "err_regex(" + error.type + ", " + error.message + ")";
}

R::Object partial(R::Object&& object) {
    return R::Object{{"special", "partial"}, {"partial", std::move(object)}};
}

R::Datum uuid() {
    return R::Object{{"special", "uuid"}};
}

R::Object arrlen(int n, R::Datum&& datum) {
    return R::Object{{"special", "arrlen"},{"len",n},{"of",datum}};
}

R::Object arrlen(int n) {
    return R::Object{{"special", "arrlen"},{"len",n}};
}

std::string repeat(std::string&& s, int n) {
    std::string string;
    string.reserve(n * s.size());
    for (int i = 0; i < n; ++i) {
        string.append(s);
    }
    return string;
}

R::Term fetch(R::Cursor& cursor, int count, double timeout) {
    // printf("fetch(..., %d, %lf)\n", count, timeout);
    R::Array array;
    int deadline = time(NULL) + int(timeout);
    for (int i = 0; count == -1 || i < count; ++i) {
        // printf("fetching next (%d)\n", i);
        time_t now = time(NULL);
        if (now > deadline) break;

        try {
            array.emplace_back(cursor.next(deadline - now));
            // printf("got %s\n", write_datum(array[array.size()-1]).c_str());
        } catch (const R::Error &e) {
            if (e.message != "next: No more data") {
                throw e; // rethrow
            }

            break;
        } catch (const R::TimeoutException &e){
            // printf("fetch timeout\n");
            break;
        }
    }

    return expr(std::move(array));
}

R::Object bag(R::Array&& array) {
    return R::Object{{"special", "bag"}, {"bag", std::move(array)}};
}

R::Object bag(R::Datum&& d) {
    return R::Object{{"special", "bag"}, {"bag", std::move(d)}};
}

std::string string_key(const R::Datum& datum) {
    const std::string* string = datum.get_string();
    if (string) return *string;
    return datum.as_json();
}

bool falsey(R::Datum&& datum) {
    bool* boolean = datum.get_boolean();
    if (boolean) return !*boolean;
    double* number = datum.get_number();
    if (number) return *number == 0;
    return false;
}

bool equal(const R::Datum& got, const R::Datum& expected) {
    const std::string* string = expected.get_string();
    if (string) {
        const R::Binary* binary = got.get_binary();
        if (binary) {
            return *binary == R::Binary(*string);
        }
    }
    if (expected.get_object() && expected.get_field("$reql_type$")) {
        if (!got.get_field("$reql_type$")) {
            R::Datum datum = got.to_raw();
            if (datum.get_field("$reql_type$")) {
                return equal(datum, expected);
            }
        }
    }
    if (got.get_object() && got.get_field("$reql_type$")) {
        const std::string* type = got.get_field("$reql_type$")->get_string();
        if (type && *type == "GROUPED_DATA" &&
            (!expected.get_object() || !expected.get_field("$reql_type$"))) {
            const R::Array* data = got.get_field("data")->get_array();
            R::Object object;
            for (R::Datum it : *data) {
                object.emplace(string_key(it.extract_nth(0)), it.extract_nth(1));
            }
            return equal(object, expected);
        }
    }
    // Handle the "special" matcher objects produced by bag(), arrlen(),
    // partial(), uuid() and regex() above.
    do {
        if (!expected.get_object()) break;
        if (!expected.get_field("special")) break;
        const std::string* type = expected.get_field("special")->get_string();
        if (!type) break;
        if (*type == "bag") {
            const R::Datum* bag_datum = expected.get_field("bag");
            if (!bag_datum || !bag_datum->get_array()) {
                break;
            }
            R::Array bag = *bag_datum->get_array();
            const R::Array* array = got.get_array();
            if (!array) {
                return false;
            }
            if (bag.size() != array->size()) {
                return false;
            }
            for (const auto& it : *array) {
                auto ref = std::find(bag.begin(), bag.end(), it);
                if (ref == bag.end()) return false;
                bag.erase(ref);
            }
            return true;
        } else if (*type == "arrlen") {
            const R::Datum* len_datum = expected.get_field("len");
            if (!len_datum) break;
            const double *len = len_datum->get_number();
            if (!len) break;
            const R::Array* array = got.get_array();
            if (!array) break;
            return array->size() == *len;
        } else if (*type == "partial") {
            const R::Object* object = got.get_object();
            if (object) {
                const R::Datum* partial_datum = expected.get_field("partial");
                if (!partial_datum) break;
                const R::Object* partial = partial_datum->get_object();
                if (!partial) break;
                for (const auto& it : *partial) {
                    if (!object->count(it.first) || !equal((*object).at(it.first), it.second)) {
                        return false;
                    }
                    return true;
                }
            }
            const R::Array* array = got.get_array();
            if (array) {
                const R::Datum* partial_datum = expected.get_field("partial");
                if (!partial_datum) break;
                const R::Array* partial = partial_datum->get_array();
                if (!partial) break;

                for (const auto& want : *partial) {
                    bool match = false;
                    for (const auto& have : *array) {
                        if (equal(have, want)) {
                            match = true;
                            break;
                        }
                    }
                    if (match == false) return false;
                }
                return true;
            }
        } else if (*type == "uuid") {
            const std::string* string = got.get_string();
            if (string && string->size() == 36) {
                return true;
            }
        } else if (*type == "regex") {
            const R::Datum* regex_datum = expected.get_field("regex");
            if (!regex_datum) break;
            const std::string* regex = regex_datum->get_string();
            if (!regex) break;
            const std::string* str = got.get_string();
            if (!str) break;
            return match(regex->c_str(), str->c_str());
        }
    } while (0);
    const R::Object* got_object = got.get_object();
    const R::Object* expected_object = expected.get_object();
    if (got_object && expected_object) {
        R::Object have = *got_object;
        for (const auto& it : *expected_object) {
            auto other = have.find(it.first);
            if (other == have.end()) return false;
            if (!equal(other->second, it.second)) return false;
            have.erase(other);
        }
        for (auto& it : have) {
            if (!falsey(std::move(it.second))) {
                return false;
            }
        }
        return true;
    }
    const R::Array* got_array = got.get_array();
    const R::Array* expected_array = expected.get_array();
    if (got_array && expected_array) {
        if (got_array->size() != expected_array->size()) return false;
        for (R::Array::const_iterator i = got_array->begin(), j = expected_array->begin();
             i < got_array->end();
             i++, j++) {
            if (!equal(*i, *j)) return false;
        }
        return true;
    }
    return got == expected;
}

R::Object partial(R::Array&& array) {
    return R::Object{{"special", "partial"}, {"partial", std::move(array)}};
}

R::Object regex(const char* pattern) {
    return R::Object{{"special", "regex"}, {"regex", pattern}};
}

void clean_slate() {
    R::table_list().for_each([](R::Var t){ return R::table_drop(*t); }).run(*conn);
    R::db("rethinkdb").table("_debug_scratch").delete_().run(*conn);
}

const char* indent() {
    // Padding for nested section output, two spaces per nesting level.
    static const char spaces[] = "                                ";
    return spaces + sizeof(spaces) - 1 - 2 * section.size();
}

std::string truncate(std::string&& string) {
    if (string.size() > 200) {
        return string.substr(0, 197) + "...";
    }
    return string;
}

int len(const R::Datum& d) {
    const R::Array* arr = d.get_array();
    if (!arr) throw ("testlib: len: expected an array but got " + to_string(d));
    return arr->size();
}

R::Term wait(int n) {
    std::this_thread::sleep_for(std::chrono::seconds(n));
    return R::expr(n);
}

R::Datum nil = R::Nil();

R::Array append(R::Array lhs, R::Array rhs) {
    if (lhs.empty()) {
        return rhs;
    }
    lhs.reserve(lhs.size() + rhs.size());
    std::move(std::begin(rhs), std::end(rhs), std::back_inserter(lhs));
    return lhs;
}

@@ -1,231 +0,0 @@
#pragma once

#include <sstream>
#include <cstdio>
#include <cstdlib>  // random()
#include <cstring>  // strstr()
#include <stack>
#include <cmath>
#include <regex>

#include <rethinkdb.h>

namespace R = RethinkDB;

extern std::vector<std::pair<const char*, bool>> section;
extern int failed;
extern int count;
extern std::unique_ptr<R::Connection> conn;
extern int verbosity;

const char* indent();

void enter_section(const char* name);
void section_cleanup();
void exit_section();

#define TEST_DO(code) \
    if (verbosity > 1) fprintf(stderr, "%sTEST: %s\n", indent(), #code); \
    code

// Evaluate `code`, compare it with `expected`, and route a thrown R::Error
// through the error-aware test_eq overloads.
#define TEST_EQ(code, expected) \
    do { \
        if (verbosity > 1) fprintf(stderr, "%sTEST: %s\n", indent(), #code); \
        try { test_eq(#code, (code), (expected)); } \
        catch (const R::Error& error) { test_eq(#code, error, (expected)); } \
    } while (0)

struct err {
    err(const char* type_, std::string message_, R::Array&& backtrace_ = {}) :
        type(type_), message(message_), backtrace(std::move(backtrace_)) { }

    std::string convert_type() const {
        return type;
    }

    static std::string trim_message(std::string msg) {
        size_t i = msg.find(":\n");
        if (i != std::string::npos) {
            return msg.substr(0, i + 1);
        }
        return msg;
    }

    std::string type;
    std::string message;
    R::Array backtrace;
};

struct err_regex {
    err_regex(const char* type_, const char* message_, R::Array&& backtrace_ = {}) :
        type(type_), message(message_), backtrace(std::move(backtrace_)) { }
    std::string type;
    std::string message;
    R::Array backtrace;
    std::string regex() const {
        return type + ": " + message;
    }
};

R::Object regex(const char* pattern);

bool match(const char* pattern, const char* string);

R::Object partial(R::Object&& object);
R::Object partial(R::Array&& array);
R::Datum uuid();
R::Object arrlen(int n, R::Datum&& datum);
R::Object arrlen(int n);
R::Term new_table();
std::string repeat(std::string&& s, int n);
R::Term fetch(R::Cursor& cursor, int count = -1, double timeout = 1);
R::Object bag(R::Array&& array);
R::Object bag(R::Datum&& d);

struct temp_table {
    temp_table() {
        char chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
        char name_[15] = "temp_";
        for (unsigned int i = 5; i + 1 < sizeof name_; ++i) {
            name_[i] = chars[random() % (sizeof chars - 1)];
        }
        name_[14] = 0;
        R::table_create(name_).run(*conn);
        name = name_;
    }

    ~temp_table() {
        try {
            R::table_drop(name).run(*conn);
        } catch (const R::Error &e) {
            if (!strstr(e.message.c_str(), "does not exist")) {
                printf("error dropping temp_table: %s\n", e.message.c_str());
            }
        }
    }

    R::Term table() { return R::table(name); }
    std::string name;
};

void clean_slate();

// std::string to_string(const R::Cursor&);
std::string to_string(const R::Term&);
std::string to_string(const R::Datum&);
std::string to_string(const R::Error&);
std::string to_string(const err_regex&);
std::string to_string(const err&);

bool equal(const R::Datum&, const R::Datum&);
bool equal(const R::Error&, const err_regex&);
bool equal(const R::Error&, const err&);

template <class T>
bool equal(const T& a, const err& b) {
    return false;
}

template <class T>
bool equal(const T& a, const err_regex& b) {
    return false;
}

template <class T>
bool equal(const R::Error& a, const T& b) {
    return false;
}

std::string truncate(std::string&&);

template <class T, class U>
void test_eq(const char* code, const T val, const U expected) {
    try {
        count++;
        if (!equal(val, expected)) {
            failed++;
            for (auto& it : section) {
                if (it.second) {
                    printf("%sSection: %s\n", indent(), it.first);
                    it.second = false;
                }
            }
            try {
                printf("%sFAILURE in ‘%s’:\n%s Expected: ‘%s’\n%s but got: ‘%s’\n",
                       indent(), code,
                       indent(), truncate(to_string(expected)).c_str(),
                       indent(), truncate(to_string(val)).c_str());
            } catch (const R::Error& e) {
                printf("%sFAILURE: Failed to print failure description: %s\n", indent(), e.message.c_str());
            } catch (...) {
                printf("%sFAILURE: Failed to print failure description\n", indent());
            }
        }
    } catch (const std::regex_error& rx_err) {
        printf("%sSKIP: error with regex (likely a buggy regex implementation): %s\n", indent(), rx_err.what());
    }
}

template <class U>
void test_eq(const char* code, const R::Cursor& val, const U expected) {
    try {
        R::Datum result = val.to_datum();
        test_eq(code, result, expected);
    } catch (R::Error& error) {
        test_eq(code, error, expected);
    }
}

int len(const R::Datum&);

R::Term wait(int n);

#define PacificTimeZone() (-7 * 3600)
#define UTCTimeZone() (0)

extern R::Datum nil;

inline R::Cursor maybe_run(R::Cursor& c, R::Connection&, R::OptArgs&& o = {}) {
    return std::move(c);
}

inline R::Cursor maybe_run(R::Term q, R::Connection& c, R::OptArgs&& o = {}) {
    return q.run(c, std::move(o));
}

inline int operator+(R::Datum a, int b) {
    return a.extract_number() + b;
}

inline R::Array operator*(R::Array arr, int n) {
    R::Array ret;
    for (int i = 0; i < n; i++) {
        for (const auto& it : arr) {
            ret.push_back(it);
        }
    }
    return ret;
}

inline R::Array array_range(int x, int y) {
    R::Array ret;
    for (int i = x; i < y; ++i) {
        ret.push_back(i);
    }
    return ret;
}

template <class F>
inline R::Array array_map(F f, R::Array a) {
    R::Array ret;
    for (R::Datum& d : a) {
        ret.push_back(f(d.extract_number()));
    }
    return ret;
}

R::Array append(R::Array lhs, R::Array rhs);

template <class T>
std::string str(T x) {
    return to_string(x);
}

@@ -1,575 +0,0 @@
desc: Tests manipulation of data in tables
table_variable_name: tbl tbl2 tbl3 tbl4
tests:

    # Set up some data
    - cd: r.range(100).for_each(tbl.insert({'id':r.row, 'a':r.row.mod(4)}))
      rb: tbl.insert((0..99).map{ |i| { :id => i, :a => i % 4 } })
      ot: {'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':100}

    - cd: r.range(100).for_each(tbl2.insert({'id':r.row, 'a':r.row.mod(4)}))
      rb: tbl2.insert((0..99).map{ |i| { :id => i, :b => i % 4 } })
      ot: {'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':100}

    - cd: r.range(100).for_each(tbl3.insert({'id':r.row, 'a':r.row.mod(4), 'b':{'c':r.row.mod(5)}}))
      rb: tbl3.insert((0..99).map{ |i| { :id => i, :a => i % 4, :b => { :c => i % 5 } } })
      ot: {'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':100}

    - def:
        cd: time1 = 1375115782.24
        js: time1 = 1375115782.24 * 1000

    - def:
        cd: time2 = 1375147296.68
        js: time2 = 1375147296.68 * 1000

    - cd:
        - tbl4.insert({'id':0, 'time':r.epoch_time(time1)})
        - tbl4.insert({'id':1, 'time':r.epoch_time(time2)})
      ot: {'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':1}

    # GMR

    - cd: tbl.sum('a')
      ot: 150
    - rb: tbl.map{|row| row['a']}.sum()
      py: tbl.map(lambda row:row['a']).sum()
      js: tbl.map(function(row){return row('a')}).sum()
      ot: 150
    - cd: tbl.group('a').sum('id')
      ot:
        cd: ({0:1200, 1:1225, 2:1250, 3:1275})
        js: ([{'group':0,'reduction':1200},{'group':1,'reduction':1225},{'group':2,'reduction':1250},{'group':3,'reduction':1275}])
    - cd: tbl.avg('a')
      ot: 1.5
    - rb: tbl.map{|row| row['a']}.avg()
      py: tbl.map(lambda row:row['a']).avg()
      js: tbl.map(function(row){return row('a')}).avg()
      ot: 1.5
    - cd: tbl.group('a').avg('id')
      ot:
        cd: {0:48, 1:49, 2:50, 3:51}
        js: [{'group':0,'reduction':48},{'group':1,'reduction':49},{'group':2,'reduction':50},{'group':3,'reduction':51}]
    - cd: tbl.min('a')['a']
      js: tbl.min('a')('a')
      ot: 0
    - cd: tbl.order_by('id').min('a')
      ot: {'a':0, 'id':0}
    - rb: tbl.map{|row| row['a']}.min()
      py: tbl.map(lambda row:row['a']).min()
      js: tbl.map(function(row){return row('a')}).min()
      ot: 0
    - cd: tbl.group('a').min('id')
      ot:
        cd: {0:{'a':0, 'id':0}, 1:{'a':1, 'id':1}, 2:{'a':2, 'id':2}, 3:{'a':3, 'id':3}}
        js: [{'group':0,'reduction':{'a':0, 'id':0}},{'group':1,'reduction':{'a':1, 'id':1}},{'group':2,'reduction':{'a':2, 'id':2}},{'group':3,'reduction':{'a':3, 'id':3}}]
    - cd: tbl.order_by('id').max('a')
      ot: {'a':3, 'id':3}
    - rb: tbl.map{|row| row['a']}.max()
      py: tbl.map(lambda row:row['a']).max()
      js: tbl.map(function(row){return row('a')}).max()
      ot: 3
    - cd: tbl.group('a').max('id')
      ot:
        cd: {0:{'a':0, 'id':96}, 1:{'a':1, 'id':97}, 2:{'a':2, 'id':98}, 3:{'a':3, 'id':99}}
        js: [{'group':0,'reduction':{'a':0, 'id':96}},{'group':1,'reduction':{'a':1, 'id':97}},{'group':2,'reduction':{'a':2, 'id':98}},{'group':3,'reduction':{'a':3, 'id':99}}]

    - cd: tbl.min()
      ot: {"a":0, "id":0}
    - cd: tbl.group('a').min()
      ot:
        cd: {0:{"a":0, "id":0}, 1:{"a":1, "id":1}, 2:{"a":2, "id":2}, 3:{"a":3, "id":3}}
        js: [{'group':0,'reduction':{"a":0,"id":0}},{'group':1,'reduction':{"a":1,"id":1}},{'group':2,'reduction':{"a":2,"id":2}},{'group':3,'reduction':{"a":3,"id":3}}]
    - cd: tbl.max()
      ot: {"a":3, "id":99}
    - cd: tbl.group('a').max()
      ot:
        cd: {0:{'a':0, 'id':96}, 1:{'a':1, 'id':97}, 2:{'a':2, 'id':98}, 3:{'a':3, 'id':99}}
        js: [{'group':0,'reduction':{"a":0,"id":96}},{'group':1,'reduction':{"a":1,"id":97}},{'group':2,'reduction':{"a":2,"id":98}},{'group':3,'reduction':{"a":3,"id":99}}]

    - rb: tbl.sum{|row| row['a']}
      py:
        - tbl.sum(lambda row:row['a'])
        - tbl.sum(r.row['a'])
      js:
        - tbl.sum(function(row){return row('a')})
        - tbl.sum(r.row('a'))
      ot: 150
    - rb: tbl.map{|row| row['a']}.sum()
      py: tbl.map(lambda row:row['a']).sum()
      js: tbl.map(function(row){return row('a')}).sum()
      ot: 150
    - rb: tbl.group{|row| row['a']}.sum{|row| row['id']}
      py: tbl.group(lambda row:row['a']).sum(lambda row:row['id'])
      js: tbl.group(function(row){return row('a')}).sum(function(row){return row('id')})
      ot:
        cd: {0:1200, 1:1225, 2:1250, 3:1275}
        js: [{'group':0,'reduction':1200},{'group':1,'reduction':1225},{'group':2,'reduction':1250},{'group':3,'reduction':1275}]
    - rb:
        - tbl.avg{|row| row['a']}
      py:
        - tbl.avg(lambda row:row['a'])
        - tbl.avg(r.row['a'])
      js:
        - tbl.avg(function(row){return row('a')})
        - tbl.avg(r.row('a'))
      ot: 1.5
    - rb: tbl.map{|row| row['a']}.avg()
      py: tbl.map(lambda row:row['a']).avg()
      js: tbl.map(function(row){return row('a')}).avg()
      ot: 1.5
    - rb: tbl.group{|row| row['a']}.avg{|row| row['id']}
      py: tbl.group(lambda row:row['a']).avg(lambda row:row['id'])
      js: tbl.group(function(row){return row('a')}).avg(function(row){return row('id')})
      ot:
        cd: {0:48, 1:49, 2:50, 3:51}
        js: [{'group':0,'reduction':48},{'group':1,'reduction':49},{'group':2,'reduction':50},{'group':3,'reduction':51}]
    - rb: tbl.order_by(r.desc('id')).min{|row| row['a']}
      py:
        - tbl.order_by(r.desc('id')).min(lambda row:row['a'])
        - tbl.order_by(r.desc('id')).min(r.row['a'])
      js:
        - tbl.order_by(r.desc('id')).min(function(row){return row('a')})
        - tbl.order_by(r.desc('id')).min(r.row('a'))
      ot: {'a':0, 'id':96}
    - rb:
        - tbl.order_by(r.desc('id')).min{|row| row['a']}['a']
      py:
        - tbl.order_by(r.desc('id')).min(lambda row:row['a'])['a']
        - tbl.order_by(r.desc('id')).min(r.row['a'])['a']
      js:
        - tbl.order_by(r.desc('id')).min(function(row){return row('a')})('a')
        - tbl.order_by(r.desc('id')).min(r.row('a'))('a')
      ot: 0
    - rb: tbl.map{|row| row['a']}.min()
      py: tbl.map(lambda row:row['a']).min()
      js: tbl.map(function(row){return row('a')}).min()
      ot: 0
    - rb: tbl.group{|row| row['a']}.min{|row| row['id']}['id']
      py: tbl.group(lambda row:row['a']).min(lambda row:row['id'])['id']
      js: tbl.group(function(row){return row('a')}).min(function(row){return row('id')})('id')
      ot:
        cd: {0:0, 1:1, 2:2, 3:3}
        js: [{'group':0,'reduction':0},{'group':1,'reduction':1},{'group':2,'reduction':2},{'group':3,'reduction':3}]
    - rb:
        - tbl.max{|row| row['a']}['a']
      py:
        - tbl.max(lambda row:row['a'])['a']
        - tbl.max(r.row['a'])['a']
      js:
        - tbl.max(function(row){return row('a')})('a')
        - tbl.max(r.row('a'))('a')
      ot: 3
    - rb: tbl.map{|row| row['a']}.max()
      py: tbl.map(lambda row:row['a']).max()
      js: tbl.map(function(row){return row('a')}).max()
      ot: 3
    - rb: tbl.group{|row| row['a']}.max{|row| row['id']}['id']
      py: tbl.group(lambda row:row['a']).max(lambda row:row['id'])['id']
      js: tbl.group(function(row){return row('a')}).max(function(row){return row('id')})('id')
      ot:
        cd: {0:96, 1:97, 2:98, 3:99}
        js: [{'group':0,'reduction':96},{'group':1,'reduction':97},{'group':2,'reduction':98},{'group':3,'reduction':99}]

    - rb: tbl.group{|row| row[:a]}.map{|row| row[:id]}.reduce{|a,b| a+b}
      py: tbl.group(lambda row:row['a']).map(lambda row:row['id']).reduce(lambda a,b:a+b)
      js: tbl.group(function(row){return row('a')}).map(function(row){return row('id')}).reduce(function(a,b){return a.add(b)})
      ot:
        cd: {0:1200, 1:1225, 2:1250, 3:1275}
        js: [{'group':0,'reduction':1200},{'group':1,'reduction':1225},{'group':2,'reduction':1250},{'group':3,'reduction':1275}]

    - rb: tbl.group{|row| row[:a]}.map{|row| row[:id]}.reduce{|a,b| a+b}
      runopts:
        group_format: 'raw'
      py:
        - tbl.group(lambda row:row['a']).map(lambda row:row['id']).reduce(lambda a,b:a+b)
        - tbl.group(r.row['a']).map(r.row['id']).reduce(lambda a,b:a + b)
      js:
        - tbl.group(function(row){return row('a')}).map(function(row){return row('id')}).reduce(function(a,b){return a.add(b)})
        - tbl.group(r.row('a')).map(r.row('id')).reduce(function(a,b){return a.add(b)})
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 1200], [1, 1225], [2, 1250], [3, 1275]]}

    - cd: r.expr([{'a':1}]).filter(true).limit(1).group('a')
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[1, [{'a':1}]]]}

    # GMR
    - cd: tbl.group('a').type_of()
      ot: "GROUPED_STREAM"
    - cd: tbl.group('a').count().type_of()
      ot: "GROUPED_DATA"
    - cd: tbl.group('a').coerce_to('ARRAY').type_of()
      ot: "GROUPED_DATA"

    - rb: tbl.orderby(index:'id').filter{|row| row['id'].lt(10)}.group('a').map{|row| row['id']}.coerce_to('ARRAY')
      py: tbl.order_by(index='id').filter(lambda row:row['id'] < 10).group('a').map(lambda row:row['id']).coerce_to('ARRAY')
      js: tbl.orderBy({index:'id'}).filter(function(row){return row('id').lt(10)}).group('a').map(function(row){return row('id')}).coerce_to('ARRAY')
      ot:
        cd: {0:[0,4,8],1:[1,5,9],2:[2,6],3:[3,7]}
        js: [{'group':0,'reduction':[0,4,8]},{'group':1,'reduction':[1,5,9]},{'group':2,'reduction':[2,6]},{'group':3,'reduction':[3,7]}]

    - rb: tbl.filter{|row| row['id'].lt(10)}.group('a').count().do{|x| x*x}
      py: tbl.filter(lambda row:row['id'] < 10).group('a').count().do(lambda x:x*x)
      js: tbl.filter(function(row){return row('id').lt(10)}).group('a').count().do(function(x){return x.mul(x)})
      ot:
        cd: {0:9,1:9,2:4,3:4}
        js: [{'group':0,'reduction':9},{'group':1,'reduction':9},{'group':2,'reduction':4},{'group':3,'reduction':4}]

    - rb: tbl.union(tbl).group('a').map{|x| x['id']}.reduce{|a,b| a+b}
      runopts:
        group_format: 'raw'
      py:
        - tbl.union(tbl).group(lambda row:row['a']).map(lambda row:row['id']).reduce(lambda a,b:a + b)
        - tbl.union(tbl).group(r.row['a']).map(r.row['id']).reduce(lambda a,b:a + b)
      js:
        - tbl.union(tbl).group(function(row){return row('a')}).map(function(row){return row('id')}).reduce(function(a,b){return a.add(b)})
        - tbl.union(tbl).group(r.row('a')).map(r.row('id')).reduce(function(a,b){return a.add(b)})
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 2400], [1, 2450], [2, 2500], [3, 2550]]}

    # GMR
    - rb: tbl.coerce_to("array").union(tbl).group('a').map{|x| x['id']}.reduce{|a,b| a+b}
      runopts:
        group_format: 'raw'
      py:
        - tbl.coerce_to("array").union(tbl).group(lambda row:row['a']).map(lambda row:row['id']).reduce(lambda a,b:a + b)
        - tbl.coerce_to("array").union(tbl).group(r.row['a']).map(r.row['id']).reduce(lambda a,b:a + b)
      js:
        - tbl.coerce_to("array").union(tbl).group(function(row){return row('a')}).map(function(row){return row('id')}).reduce(function(a,b){return a.add(b)})
        - tbl.coerce_to("array").union(tbl).group(r.row('a')).map(r.row('id')).reduce(function(a,b){return a.add(b)})
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 2400], [1, 2450], [2, 2500], [3, 2550]]}

    # GMR
    - rb: tbl.union(tbl.coerce_to("array")).group('a').map{|x| x['id']}.reduce{|a,b| a+b}
      runopts:
        group_format: 'raw'
      py:
        - tbl.union(tbl.coerce_to("array")).group(lambda row:row['a']).map(lambda row:row['id']).reduce(lambda a,b:a + b)
        - tbl.union(tbl.coerce_to("array")).group(r.row['a']).map(r.row['id']).reduce(lambda a,b:a + b)
      js:
        - tbl.union(tbl.coerce_to("array")).group(function(row){return row('a')}).map(function(row){return row('id')}).reduce(function(a,b){return a.add(b)})
        - tbl.union(tbl.coerce_to("array")).group(r.row('a')).map(r.row('id')).reduce(function(a,b){return a.add(b)})
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 2400], [1, 2450], [2, 2500], [3, 2550]]}

    - py:
        - tbl.group(lambda row:row['a']).map(lambda row:row['id']).reduce(lambda a,b:a + b)
        - tbl.group(r.row['a']).map(r.row['id']).reduce(lambda a,b:a + b)
      js:
        - tbl.group(function(row){return row('a')}).map(function(row){return row('id')}).reduce(function(a,b){return a.add(b)})
        - tbl.group(r.row('a')).map(r.row('id')).reduce(function(a,b){return a.add(b)})
        - tbl.group('a').map(r.row('id')).reduce(function(a,b){return a.add(b)})
      rb: tbl.group('a').map{|x| x['id']}.reduce{|a,b| a+b}
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 1200], [1, 1225], [2, 1250], [3, 1275]]}

    # undefined...
    - js:
        - tbl.group(function(row){})
        - tbl.map(function(row){})
        - tbl.reduce(function(row){})
        - tbl.group(r.row('a')).group(function(row){})
        - tbl.group(r.row('a')).map(function(row){})
        - tbl.group(r.row('a')).reduce(function(row){})
        - tbl.map(r.row('id')).group(function(row){})
        - tbl.map(r.row('id')).map(function(row){})
        - tbl.map(r.row('id')).reduce(function(row){})
        - tbl.reduce(function(a,b){return a+b}).group(function(row){})
        - tbl.reduce(function(a,b){return a+b}).map(function(row){})
        - tbl.reduce(function(a,b){return a+b}).reduce(function(row){})
      ot: err('ReqlDriverCompileError', 'Anonymous function returned `undefined`. Did you forget a `return`?', [0])

    # GroupBy

    # COUNT

    - cd: tbl.group('a').count()
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 25], [1, 25], [2, 25], [3, 25]]}

    # SUM
    - cd: tbl.group('a').sum('id')
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 1200], [1, 1225], [2, 1250], [3, 1275]]}

    # AVG
    - cd: tbl.group('a').avg('id')
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 48], [1, 49], [2, 50], [3, 51]]}

    # Pattern Matching
    - rb: tbl3.group{|row| row['b']['c']}.count()
      py: tbl3.group(lambda row:row['b']['c']).count()
      js: tbl3.group(function(row){return row('b')('c')}).count()
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 20], [1, 20], [2, 20], [3, 20], [4, 20]]}

    # Multiple keys
    - rb: tbl.group('a', lambda {|row| row['id']%3}).count()
      py: tbl.group('a', lambda row:row['id'].mod(3)).count()
      js: tbl.group('a', function(row){return row('id').mod(3)}).count()
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[[0, 0], 9], [[0, 1], 8], [[0, 2], 8], [[1, 0], 8], [[1, 1], 9], [[1, 2], 8], [[2, 0], 8], [[2, 1], 8], [[2, 2], 9], [[3, 0], 9], [[3, 1], 8], [[3, 2], 8]]}

    # Grouping by time
    - rb: tbl4.group('time').coerce_to('array')
      runopts:
        time_format: 'raw'
      ot:
        rb: {{"$reql_type$":"TIME","epoch_time":1375115782.24,"timezone":"+00:00"}:[{"id":0,"time":{"$reql_type$":"TIME","epoch_time":1375115782.24,"timezone":"+00:00"}}],{"$reql_type$":"TIME","epoch_time":1375147296.68,"timezone":"+00:00"}:[{"id":1,"time":{"$reql_type$":"TIME","epoch_time":1375147296.68,"timezone":"+00:00"}}]}
        py: {frozenset([('$reql_type$','TIME'),('timezone','+00:00'),('epoch_time',1375115782.24)]):[{'id':0,'time':{'timezone':'+00:00','$reql_type$':'TIME','epoch_time':1375115782.24}}],frozenset([('$reql_type$','TIME'),('timezone','+00:00'),('epoch_time',1375147296.68)]):[{'id':1,'time':{'timezone':'+00:00','$reql_type$':'TIME','epoch_time':1375147296.68}}]}
        js: [{'group':{"$reql_type$":"TIME","epoch_time":1375115782240,"timezone":"+00:00"},'reduction':[{"id":0,"time":{"$reql_type$":"TIME","epoch_time":1375115782240,"timezone":"+00:00"}}]},{'group':{"$reql_type$":"TIME","epoch_time":1375147296680,"timezone":"+00:00"},'reduction':[{"id":1,"time":{"$reql_type$":"TIME","epoch_time":1375147296680,"timezone":"+00:00"}}]}]

    # Distinct
    - py: tbl.map(lambda row:row['a']).distinct().count()
      js: tbl.map(function(row) { return row('a'); }).distinct().count()
      rb: tbl.map{ |row| row[:a] }.distinct.count
      ot: 4

    - cd: tbl.distinct().type_of()
      ot: "STREAM"

    - cd: tbl.distinct().count()
      ot: 100

    - cd: tbl.distinct({index:'id'}).type_of()
      py: tbl.distinct(index='id').type_of()
      ot: "STREAM"

    - cd: tbl.distinct({index:'id'}).count()
      py: tbl.distinct(index='id').count()
      ot: 100

    - cd: tbl.index_create('a')
      ot: {'created':1}

    - rb: tbl.index_create('m', multi:true){|row| [row['a'], row['a']]}
      ot: {'created':1}

    - rb: tbl.index_create('m2', multi:true){|row| [1, 2]}
      ot: {'created':1}

    - cd: tbl.index_wait('a').pluck('index', 'ready')
      ot: [{'index':'a','ready':true}]

    - rb: tbl.index_wait('m').pluck('index', 'ready')
      ot: [{'index':'m','ready':true}]

    - rb: tbl.index_wait('m2').pluck('index', 'ready')
      ot: [{'index':'m2','ready':true}]

    - cd: tbl.between(0, 1, {index:'a'}).distinct().count()
      py: tbl.between(0, 1, index='a').distinct().count()
      ot: 25

    - cd: tbl.between(0, 1, {index:'a'}).distinct({index:'id'}).count()
      py: tbl.between(0, 1, index='a').distinct(index='id').count()
      ot: 25

    - rb: tbl.between(0, 1, {index:'m'}).count()
      ot: 50

    - rb: tbl.between(0, 1, {index:'m'}).distinct().count()
      ot: 25

    - rb: tbl.orderby({index:'m'}).count()
      ot: 200

    - rb: tbl.orderby({index:'m'}).distinct().count()
      ot: 100

    - rb: tbl.orderby({index:r.desc('m')}).count()
      ot: 200

    - rb: tbl.orderby({index:r.desc('m')}).distinct().count()
      ot: 100

    - rb: tbl.between(1, 3, {index:'m2'}).count()
      ot: 200

    - rb: tbl.between(1, 3, {index:'m2'}).distinct().count()
      ot: 100

    - rb: tbl.between(1, 3, {index:'m2'}).orderby(index:r.desc('m2')).distinct().count()
      ot: 100

    - rb: tbl.between(0, 1, {index:'m'}).count()
      ot: 50

    - rb: tbl.between(0, 1, {index:'m'}).distinct().count()
      ot: 25

    - cd: tbl.distinct({index:'a'}).type_of()
      py: tbl.distinct(index='a').type_of()
      ot: "STREAM"

    - cd: tbl.distinct({index:'a'}).count()
      py: tbl.distinct(index='a').count()
      ot: 4

    - cd: tbl.group()
      ot: err('ReqlQueryLogicError', 'Cannot group by nothing.', [])

    - py: tbl.group(index='id').count()
      js: tbl.group({index:'id'}).count()
      cd: tbl.group(index:'id').count
      runopts:
        group_format: 'raw'
      ot: ({'$reql_type$':'GROUPED_DATA', 'data':[[0, 1], [1, 1], [2, 1], [3, 1], [4, 1], [5, 1], [6, 1], [7, 1], [8, 1], [9, 1], [10, 1], [11, 1], [12, 1], [13, 1], [14, 1], [15, 1], [16, 1], [17, 1], [18, 1], [19, 1], [20, 1], [21, 1], [22, 1], [23, 1], [24, 1], [25, 1], [26, 1], [27, 1], [28, 1], [29, 1], [30, 1], [31, 1], [32, 1], [33, 1], [34, 1], [35, 1], [36, 1], [37, 1], [38, 1], [39, 1], [40, 1], [41, 1], [42, 1], [43, 1], [44, 1], [45, 1], [46, 1], [47, 1], [48, 1], [49, 1], [50, 1], [51, 1], [52, 1], [53, 1], [54, 1], [55, 1], [56, 1], [57, 1], [58, 1], [59, 1], [60, 1], [61, 1], [62, 1], [63, 1], [64, 1], [65, 1], [66, 1], [67, 1], [68, 1], [69, 1], [70, 1], [71, 1], [72, 1], [73, 1], [74, 1], [75, 1], [76, 1], [77, 1], [78, 1], [79, 1], [80, 1], [81, 1], [82, 1], [83, 1], [84, 1], [85, 1], [86, 1], [87, 1], [88, 1], [89, 1], [90, 1], [91, 1], [92, 1], [93, 1], [94, 1], [95, 1], [96, 1], [97, 1], [98, 1], [99, 1]]})

    - py: tbl.group(index='a').count()
      js: tbl.group({index:'a'}).count()
      rb: tbl.group(index:'a').count
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[0, 25], [1, 25], [2, 25], [3, 25]]}

    - py: tbl.group('a', index='id').count()
      js: tbl.group('a', {index:'id'}).count()
      rb: tbl.group('a', index:'id').count
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[[0, 0], 1], [[0, 4], 1], [[0, 8], 1], [[0, 12], 1], [[0, 16], 1], [[0, 20], 1], [[0, 24], 1], [[0, 28], 1], [[0, 32], 1], [[0, 36], 1], [[0, 40], 1], [[0, 44], 1], [[0, 48], 1], [[0, 52], 1], [[0, 56], 1], [[0, 60], 1], [[0, 64], 1], [[0, 68], 1], [[0, 72], 1], [[0, 76], 1], [[0, 80], 1], [[0, 84], 1], [[0, 88], 1], [[0, 92], 1], [[0, 96], 1], [[1, 1], 1], [[1, 5], 1], [[1, 9], 1], [[1, 13], 1], [[1, 17], 1], [[1, 21], 1], [[1, 25], 1], [[1, 29], 1], [[1, 33], 1], [[1, 37], 1], [[1, 41], 1], [[1, 45], 1], [[1, 49], 1], [[1, 53], 1], [[1, 57], 1], [[1, 61], 1], [[1, 65], 1], [[1, 69], 1], [[1, 73], 1], [[1, 77], 1], [[1, 81], 1], [[1, 85], 1], [[1, 89], 1], [[1, 93], 1], [[1, 97], 1], [[2, 2], 1], [[2, 6], 1], [[2, 10], 1], [[2, 14], 1], [[2, 18], 1], [[2, 22], 1], [[2, 26], 1], [[2, 30], 1], [[2, 34], 1], [[2, 38], 1], [[2, 42], 1], [[2, 46], 1], [[2, 50], 1], [[2, 54], 1], [[2, 58], 1], [[2, 62], 1], [[2, 66], 1], [[2, 70], 1], [[2, 74], 1], [[2, 78], 1], [[2, 82], 1], [[2, 86], 1], [[2, 90], 1], [[2, 94], 1], [[2, 98], 1], [[3, 3], 1], [[3, 7], 1], [[3, 11], 1], [[3, 15], 1], [[3, 19], 1], [[3, 23], 1], [[3, 27], 1], [[3, 31], 1], [[3, 35], 1], [[3, 39], 1], [[3, 43], 1], [[3, 47], 1], [[3, 51], 1], [[3, 55], 1], [[3, 59], 1], [[3, 63], 1], [[3, 67], 1], [[3, 71], 1], [[3, 75], 1], [[3, 79], 1], [[3, 83], 1], [[3, 87], 1], [[3, 91], 1], [[3, 95], 1], [[3, 99], 1]]}

    - py: tbl.group('a', index='a').count()
      js: tbl.group('a', {index:'a'}).count()
      rb: tbl.group('a', index:'a').count
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[[0, 0], 25], [[1, 1], 25], [[2, 2], 25], [[3, 3], 25]]}

    - rb: tbl.group('a', lambda {|row| 'f'}, lambda {|row| []}, lambda {|row| [{}, [0], null, 0]}, multi:true).count
      py: tbl.group('a', lambda row:'f', lambda row:[], lambda row:[{}, [0], null, 0], multi=True).count()
      js: tbl.group('a', function(row){return 'f';}, function(row){return [];}, function(row){return [{}, [0], null, 0];}, {multi:true}).count()
      runopts:
        group_format: 'raw'
      ot: {'$reql_type$':'GROUPED_DATA', 'data':[[[0, "f", null, [0]], 25], [[0, "f", null, null], 25], [[0, "f", null, 0], 25], [[0, "f", null, {}], 25], [[1, "f", null, [0]], 25], [[1, "f", null, null], 25], [[1, "f", null, 0], 25], [[1, "f", null, {}], 25], [[2, "f", null, [0]], 25], [[2, "f", null, null], 25], [[2, "f", null, 0], 25], [[2, "f", null, {}], 25], [[3, "f", null, [0]], 25], [[3, "f", null, null], 25], [[3, "f", null, 0], 25], [[3, "f", null, {}], 25]]}

    - cd: tbl.group('a').count().ungroup()
      ot: [{'group':0, 'reduction':25}, {'group':1, 'reduction':25}, {'group':2, 'reduction':25}, {'group':3, 'reduction':25}]

    - cd: tbl.group('a').ungroup()['group']
      js: tbl.group('a').ungroup()('group')
      ot: [0, 1, 2, 3]

    - py: tbl.order_by(index='id').limit(16).group('a','a').map(r.row['id']).sum().ungroup()
      js: tbl.order_by({index:'id'}).limit(16).group('a','a').map(r.row('id')).sum().ungroup()
      rb: tbl.order_by(index:'id').limit(16).group('a','a').map{|row| row['id']}.sum().ungroup()
      ot: [{'group':[0,0],'reduction':24},{'group':[1,1],'reduction':28},{'group':[2,2],'reduction':32},{'group':[3,3],'reduction':36}]

    - cd: tbl.group('a', null).count().ungroup()
      ot: [{'group':[0,null],'reduction':25},{'group':[1,null],'reduction':25},{'group':[2,null],'reduction':25},{'group':[3,null],'reduction':25}]

    - py: tbl.group('a', lambda row:[1,'two'], multi=True).count().ungroup()
      js: tbl.group('a', function(row){return [1,'two']},{multi:true}).count().ungroup()
      rb: tbl.group('a', lambda {|row| [1,'two']}, multi:true).count().ungroup()
      ot: [{'group':[0,1],'reduction':25},{'group':[0,'two'],'reduction':25},{'group':[1,1],'reduction':25},{'group':[1,'two'],'reduction':25},{'group':[2,1],'reduction':25},{'group':[2,'two'],'reduction':25},{'group':[3,1],'reduction':25},{'group':[3,'two'],'reduction':25}]

    # proper test for seq.count()
    - cd: tbl.count()
      ot: 100

    - js: tbl.filter(r.row('a').ne(1).and(r.row('id').gt(10))).update({'b':r.row('a').mul(10)})
      py: tbl.filter(r.row['a'].ne(1).and_(r.row['id'].gt(10))).update({'b':r.row['a'] * 10})
      rb: tbl.filter{|row| row['a'].ne(1).and(row['id'].gt(10))}.update{|row| {'b'=>row['a'] * 10}}
      ot: partial({'errors':0, 'replaced':67})

    - cd: tbl.group('b').count()
      ot:
        cd: {null:33, 0:22, 20:22, 30:23}
        js: [{"group":null, "reduction":33}, {"group":0, "reduction":22}, {"group":20, "reduction":22}, {"group":30, "reduction":23}]

    - cd: tbl.group('a').sum('b')
      ot:
        cd: {0:0, 2:440, 3:690}
        js: [{"group":0, "reduction":0}, {"group":2, "reduction":440}, {"group":3, "reduction":690}]

    - cd: tbl.group('a').avg('b')
      ot:
        cd: {0:0, 2:20, 3:30}
        js: [{"group":0, "reduction":0}, {"group":2, "reduction":20}, {"group":3, "reduction":30}]

    - cd: tbl.order_by('id').group('a').min('b')
      ot:
        cd: {0:{"a":0, "b":0, "id":12}, 2:{"a":2, "b":20, "id":14}, 3:{"a":3, "b":30, "id":11}}
        js: [{"group":0, "reduction":{"a":0, "b":0, "id":12}}, {"group":2, "reduction":{"a":2, "b":20, "id":14}}, {"group":3, "reduction":{"a":3, "b":30, "id":11}}]

    - cd: tbl.order_by('id').group('a').min('id')
      ot:
        cd: {0:{"a":0, "id":0}, 1:{"a":1, "id":1}, 2:{"a":2, "id":2}, 3:{"a":3, "id":3}}
        js: [{"group":0, "reduction":{"a":0, "id":0}}, {"group":1, "reduction":{"a":1, "id":1}}, {"group":2, "reduction":{"a":2, "id":2}}, {"group":3, "reduction":{"a":3, "id":3}}]

    - cd: tbl.order_by('id').group('a').max('b')
      ot:
        cd: {0:{"a":0, "b":0, "id":12}, 2:{"a":2, "b":20, "id":14}, 3:{"a":3, "b":30, "id":11}}
        js: [{"group":0, "reduction":{"a":0,"b":0, "id":12}}, {"group":2, "reduction":{"a":2, "b":20, "id":14}}, {"group":3, "reduction":{"a":3, "b":30, "id":11}}]

    - cd: tbl.min()
      ot: {'a':0,'id':0}
    - py: tbl.min(index='id')
      rb: tbl.min(index:'id')
      js: tbl.min({index:'id'})
      ot: {'a':0,'id':0}
    - py: tbl.min(index='a')
      rb: tbl.min(index:'a')
      js: tbl.min({index:'a'})
      ot: {'a':0,'id':0}

    - cd: tbl.max().without('b')
      ot: {'a':3,'id':99}
    - py: tbl.max(index='id').without('b')
      rb: tbl.max(index:'id').without('b')
      js: tbl.max({index:'id'}).without('b')
      ot: {'a':3,'id':99}
    - py: tbl.max(index='a').without('b')
      rb: tbl.max(index:'a').without('b')
      js: tbl.max({index:'a'}).without('b')
      ot: {'a':3,'id':99}

    # Infix

    - cd: r.group([ 1, 1, 2 ], r.row).count().ungroup()
      rb: r.group([ 1, 1, 2 ]) {|row| row}.count().ungroup()
      ot: [ {'group': 1, 'reduction': 2}, {'group': 2, 'reduction': 1} ]
    - cd:
        - r.count([ 1, 2 ])
        - r.count([ 1, 2 ], r.row.gt(0))
      rb:
        - r.count([ 1, 2 ])
        - r.count([ 1, 2 ]) {|row| row.gt(0)}
      ot: 2
    - cd:
        - r.sum([ 1, 2 ])
        - r.sum([ 1, 2 ], r.row)
      rb: r.sum([ 1, 2 ])
      ot: 3
    - cd:
        - r.avg([ 1, 2 ])
        - r.avg([ 1, 2 ], r.row)
      rb: r.avg([ 1, 2 ])
      ot: 1.5
    - cd:
        - r.min([ 1, 2 ])
        - r.min([ 1, 2 ], r.row)
      rb: r.min([ 1, 2 ])
      ot: 1
    - cd:
        - r.max([ 1, 2 ])
        - r.max([ 1, 2 ], r.row)
      rb: r.max([ 1, 2 ])
      ot: 2
    - cd: r.distinct([ 1, 1 ])
      ot: [ 1 ]
    - cd:
        - r.contains([ 1, 2 ])
        - r.contains([ 1, 2 ], r.row.gt(0))
      rb:
        - r.contains([ 1, 2 ])
        - r.contains([ 1, 2 ]) {|row| row.gt(0)}
      ot: true

@@ -1,316 +0,0 @@
desc: Test the arity of every function
table_variable_name: tbl
tests:

    # TODO: add test for slice (should require one or two arguments)

    # Set up some data
    - def: db = r.db('test')
    - def: obj = r.expr({'a':1})
    - def: array = r.expr([1])

    - ot: err("ReqlCompileError", "Expected 0 arguments but found 1.", [])
      cd: r.db_list(1)

    - ot: err("ReqlCompileError", "Expected 1 argument but found 2.", [])
      cd:
        - tbl.zip(1)
        - tbl.is_empty(1)
        - obj.keys(1)

    - cd: tbl.distinct(1)
      ot:
        cd: err("ReqlCompileError", "Expected 1 argument but found 2.", [])
        js: err("ReqlCompileError", "Expected 0 arguments (not including options) but found 1.", [])

    - cd: tbl.delete(1)
      ot:
        js: err("ReqlCompileError", "Expected 0 arguments (not including options) but found 1.", [])
        cd: err("ReqlCompileError", "Expected 1 argument but found 2.", [])

    - rb: db.table_list(1)
      ot: err("ReqlCompileError", "Expected between 0 and 1 arguments but found 2.", [])

    - ot: err("ReqlCompileError", "Expected 1 argument but found 0.", [])
      cd:
        - r.db_create()
        - r.db_drop()
        - r.db()
        - r.floor()
        - r.ceil()
        - r.round()

    - cd: r.error()
      ot: err("ReqlQueryLogicError", "Empty ERROR term outside a default block.", [])

    - cd: r.js()
      ot:
        cd: err("ReqlCompileError", "Expected 1 argument but found 0.", [])
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 0.", [])

    - cd: r.expr()
      ot:
        py3.3: err_regex('TypeError', '.* missing 1 required positional argument.*', [])
        py3.4: err_regex('TypeError', '.* missing 1 required positional argument.*', [])
        py3.5: err_regex('TypeError', '.* missing 1 required positional argument.*', [])
        py: err_regex('TypeError', ".* takes at least 1 (?:positional )?argument \(0 given\)", [])
        js: err("ReqlCompileError", "Expected between 1 and 2 arguments but found 0.", [])
        rb: err("ArgumentError", 'wrong number of arguments (0 for 1)', [])
        rb2: err("ArgumentError", 'wrong number of arguments (0 for 1..2)', [])

    - ot: err("ReqlCompileError", "Expected 2 arguments but found 1.", [])
      cd:
        - tbl.concat_map()
        - tbl.skip()
        - tbl.limit()
        - array.append()
        - array.prepend()
        - array.difference()
        - array.set_insert()
        - array.set_union()
        - array.set_intersection()
        - array.set_difference()
        - tbl.nth()
        - tbl.for_each()
        - tbl.get()
        - r.expr([]).sample()
        - tbl.offsets_of()
    - ot: err("ReqlCompileError", "Expected 1 argument but found 2.", [])
      cd:
        - r.db_create(1,2)
        - r.db_drop(1,2)
        - r.db(1,2)
        - r.floor(1, 2)
        - r.ceil(1, 2)
        - r.round(1, 2)

    - cd: tbl.filter()
      ot:
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 0.", [])
        cd: err("ReqlCompileError", "Expected 2 arguments but found 1.", [])

    - cd: r.error(1, 2)
      ot: err("ReqlCompileError", "Expected between 0 and 1 arguments but found 2.", [])

    - cd: db.table_drop()
      ot: err("ReqlQueryLogicError", "Expected type DATUM but found DATABASE:", [])

    - cd: db.table_create()
      ot:
        cd: err("ReqlQueryLogicError", "Expected type DATUM but found DATABASE:", [])
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 0.", [])

    - cd: r.js(1,2)
      ot:
        cd: err("ReqlCompileError", "Expected 1 argument but found 2.", [])
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 2.", [])

    - ot: err("ReqlCompileError", "Expected 2 arguments but found 3.", [])
      cd:
        - tbl.concat_map(1,2)
        - tbl.skip(1,2)
        - tbl.limit(1,2)
        - array.append(1,2)
        - array.prepend(1,2)
        - array.difference([], [])
        - array.set_insert(1,2)
        - array.set_union([1],[2])
        - array.set_intersection([1],[2])
        - array.set_difference([1],[2])
        - tbl.nth(1,2)
        - tbl.for_each(1,2)
        - tbl.get(1,2)
        - r.expr([]).sample(1,2)
        - tbl.offsets_of(1,2)

    - cd: tbl.filter(1,2,3)
      ot:
        cd: err("ReqlCompileError", "Expected 2 arguments but found 4.", [])
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 3.", [])

    - cd: db.table_drop(1,2)
      ot: err("ReqlCompileError", "Expected between 1 and 2 arguments but found 3.", [])

    - cd: r.expr([]).delete_at()
      ot: err("ReqlCompileError", "Expected between 2 and 3 arguments but found 1.", [])

    - cd: db.table_create(1,2)
      ot:
        cd: err("ReqlCompileError", "Expected between 1 and 2 arguments but found 3.", [])
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 2.", [])

    - cd: tbl.count(1,2)
      ot: err("ReqlCompileError", "Expected between 1 and 2 arguments but found 3.", [])

    - ot:
        cd: err("ReqlCompileError", "Expected 2 arguments but found 1.", [])
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 0.", [])
      cd:
        - tbl.update()
        - tbl.replace()
        - tbl.insert()

    - cd: db.table()
      ot:
        cd: err("ReqlQueryLogicError", "Expected type DATUM but found DATABASE:", [])
        js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 0.", [])

    - cd: tbl.reduce()
      ot: err("ReqlCompileError", "Expected 2 arguments but found 1.", [])

    - cd: tbl.eq_join()
      ot:
        cd: err("ReqlCompileError", "Expected 3 arguments but found 1.", [])
        js: err("ReqlCompileError", "Expected 2 arguments (not including options) but found 0.", [])

    - ot: err("ReqlCompileError", "Expected 3 arguments but found 1.", [])
      cd:
        - tbl.inner_join()
        - tbl.outer_join()
        - r.expr([]).insert_at()
        - r.expr([]).splice_at()
        - r.expr([]).change_at()

    - cd: tbl.eq_join(1)
      ot:
        cd: err("ReqlCompileError", "Expected 3 arguments but found 2.", [])
        js: err("ReqlCompileError", "Expected 2 arguments (not including options) but found 1.", [])

    - ot: err("ReqlCompileError", "Expected 3 arguments but found 2.", [])
      cd:
        - tbl.inner_join(1)
        - tbl.outer_join(1)
        - r.expr([]).insert_at(1)
        - r.expr([]).splice_at(1)
        - r.expr([]).change_at(1)

    - cd: tbl.eq_join(1,2,3,4)
      ot:
        cd: err("ReqlCompileError", "Expected 3 arguments but found 5.", [])
        js: err("ReqlCompileError", "Expected 2 arguments (not including options) but found 4.", [])

    - ot: err("ReqlCompileError", "Expected 3 arguments but found 4.", [])
      cd:
        - tbl.inner_join(1,2,3)
        - tbl.outer_join(1,2,3)
        - r.expr([]).insert_at(1, 2, 3)
        - r.expr([]).splice_at(1, 2, 3)
        - r.expr([]).change_at(1, 2, 3)

    - cd: tbl.map()
      ot:
        cd: err('ReqlCompileError', "Expected 2 or more arguments but found 1.", [])
        js: err('ReqlCompileError', "Expected 1 or more arguments but found 0.", [])

    - cd: r.branch(1,2)
      ot: err("ReqlCompileError", "Expected 3 or more arguments but found 2.", [])
    - cd: r.branch(1,2,3,4)
      ot: err("ReqlQueryLogicError", "Cannot call `branch` term with an even number of arguments.", [])
|
||||
|
||||
- cd: r.expr({})[1,2]
|
||||
js: r.expr({})(1,2)
|
||||
ot:
|
||||
js: err('ReqlCompileError', "Expected 1 argument but found 2.", [])
|
||||
py: err('ReqlQueryLogicError', 'Expected NUMBER or STRING as second argument to `bracket` but found ARRAY.')
|
||||
rb: err('ArgumentError', 'wrong number of arguments (2 for 1)')
|
||||
|
||||
- cd: tbl.insert([{'id':0},{'id':1},{'id':2},{'id':3},{'id':4},{'id':5},{'id':6},{'id':7},{'id':8},{'id':9}]).get_field('inserted')
|
||||
ot: 10
|
||||
|
||||
- cd: tbl.get_all(0, 1, 2).get_field('id')
|
||||
ot: bag([0, 1, 2])
|
||||
|
||||
- cd: tbl.get_all(r.args([]), 0, 1, 2).get_field('id')
|
||||
ot: bag([0, 1, 2])
|
||||
|
||||
- cd: tbl.get_all(r.args([0]), 1, 2).get_field('id')
|
||||
ot: bag([0, 1, 2])
|
||||
|
||||
- cd: tbl.get_all(r.args([0, 1]), 2).get_field('id')
|
||||
ot: bag([0, 1, 2])
|
||||
|
||||
- cd: tbl.get_all(r.args([0, 1, 2])).get_field('id')
|
||||
ot: bag([0, 1, 2])
|
||||
|
||||
- cd: tbl.get_all(r.args([0]), 1, r.args([2])).get_field('id')
|
||||
ot: bag([0, 1, 2])
|
||||
|
||||
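# The get_all cases above all rely on r.args, which splices an array into a
# term's argument list. A minimal, hedged sketch of the same idea with the
# Python driver (assumes the pre-2.4 `import rethinkdb as r` import style, a
# local server, and an existing table named 'tbl'):
#   import rethinkdb as r
#   conn = r.connect()
#   # identical to r.table('tbl').get_all(0, 1, 2)
#   r.table('tbl').get_all(r.args([0, 1, 2])).run(conn)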
# Make sure partial-evaluation still works
|
||||
|
||||
- cd: r.branch(true, 1, r.error("a"))
|
||||
ot: 1
|
||||
|
||||
- cd: r.branch(r.args([true, 1]), r.error("a"))
|
||||
ot: 1
|
||||
|
||||
- cd: r.expr(true).branch(1, 2)
|
||||
ot: 1
|
||||
|
||||
- cd: r.branch(r.args([true, 1, r.error("a")]))
|
||||
ot: err("ReqlUserError", "a", [])
|
||||
|
||||
# Make sure our grouped data hack still works
|
||||
|
||||
- rb: tbl.group{|row| row['id'] % 2}.count({'id':0}).ungroup()
|
||||
py: tbl.group(lambda row:row['id'].mod(2)).count({'id':0}).ungroup()
|
||||
js: tbl.group(r.row('id').mod(2)).count({'id':0}).ungroup()
|
||||
ot: ([{'group':0, 'reduction':1}])
|
||||
|
||||
- rb: tbl.group{|row| row['id'] % 2}.count(r.args([{'id':0}])).ungroup()
|
||||
py: tbl.group(r.row['id'].mod(2)).count(r.args([{'id':0}])).ungroup()
|
||||
js: tbl.group(r.row('id').mod(2)).count(r.args([{'id':0}])).ungroup()
|
||||
ot: ([{'group':0, 'reduction':1}])
|
||||
|
||||
# Make sure `r.literal` still works
|
||||
|
||||
- cd: r.expr({'a':{'b':1}}).merge(r.args([{'a':r.literal({'c':1})}]))
|
||||
ot: ({'a':{'c':1}})
|
||||
|
||||
- cd: r.http("httpbin.org/get","bad_param")
|
||||
ot:
|
||||
js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 2.", [])
|
||||
rb: err("ReqlCompileError", "Expected 1 argument but found 2.", [])
|
||||
py: err_regex('TypeError', ".*takes exactly 1 argument \(2 given\)", [])
|
||||
py3.0: err_regex('TypeError', ".*takes exactly 1 positional argument \(2 given\)", [])
|
||||
py3.1: err_regex('TypeError', ".*takes exactly 1 positional argument \(2 given\)", [])
|
||||
py3.2: err_regex('TypeError', ".*takes exactly 1 positional argument \(2 given\)", [])
|
||||
py3: err_regex('TypeError', ".*takes 1 positional argument but 2 were given", [])
|
||||
|
||||
- cd: r.binary("1", "2")
|
||||
ot:
|
||||
py: err_regex('TypeError', ".*takes exactly 1 argument \(2 given\)", [])
|
||||
js: err("ReqlCompileError", "Expected 1 argument but found 2.", [])
|
||||
rb: err("ReqlCompileError", "Expected 1 argument but found 2.", [])
|
||||
py3.0: err_regex('TypeError', ".*takes exactly 1 positional argument \(2 given\)", [])
|
||||
py3.1: err_regex('TypeError', ".*takes exactly 1 positional argument \(2 given\)", [])
|
||||
py3.2: err_regex('TypeError', ".*takes exactly 1 positional argument \(2 given\)", [])
|
||||
py3: err_regex('TypeError', ".*takes 1 positional argument but 2 were given", [])
|
||||
- cd: r.binary()
|
||||
ot:
|
||||
py: err_regex('TypeError', ".*takes exactly 1 argument \(0 given\)", [])
|
||||
js: err("ReqlCompileError", "Expected 1 argument but found 0.", [])
|
||||
rb: err("ReqlCompileError", "Expected 1 argument but found 0.", [])
|
||||
py3.0: err_regex('TypeError', ".*takes exactly 1 positional argument \(0 given\)", [])
|
||||
py3.1: err_regex('TypeError', ".*takes exactly 1 positional argument \(0 given\)", [])
|
||||
py3.2: err_regex('TypeError', ".*takes exactly 1 argument \(0 given\)", [])
|
||||
py3: err_regex('TypeError', ".* missing 1 required positional argument.*", [])
|
||||
|
||||
# TODO: Math and logic
|
||||
# TODO: Upper bound on optional arguments
|
||||
# TODO: between, merge, slice
|
||||
|
||||
- cd: tbl.index_rename('idx')
|
||||
ot:
|
||||
cd: err('ReqlCompileError','Expected 3 arguments but found 2.',[])
|
||||
js: err('ReqlCompileError','Expected 2 arguments (not including options) but found 1.',[])
|
||||
|
||||
- cd: tbl.index_rename('idx','idx2','idx3')
|
||||
ot:
|
||||
cd: err('ReqlCompileError','Expected 3 arguments but found 4.',[])
|
||||
js: err('ReqlCompileError','Expected 2 arguments (not including options) but found 3.',[])
|
||||
|
||||
- cd:
|
||||
- r.now('foo')
|
||||
- r.now(r.args([1,2,3]))
|
||||
ot: err('ReqlCompileError','NOW does not accept any args.')
|
@ -1,142 +0,0 @@

desc: Test edge cases of changefeed operations
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
- def: common_prefix = r.expr([0,1,2,3,4,5,6,7,8])
|
||||
|
||||
- js: tbl.indexCreate('sindex', function (row) { return common_prefix.append(row('value')); })
|
||||
py: tbl.index_create('sindex', lambda row:common_prefix.append(row['value']))
|
||||
rb: tbl.index_create('sindex'){ |row| common_prefix.append(row['value']) }
|
||||
ot: ({'created':1})
|
||||
- cd: tbl.index_wait('sindex')
|
||||
|
||||
# create target values
|
||||
- cd: pre = r.range(7).coerce_to('array').add(r.range(10,70).coerce_to('array')).append(100).map(r.row.coerce_to('string'))
|
||||
rb: pre = r.range(7).coerce_to('array').add(r.range(10,70).coerce_to('array')).append(100).map{ |row| row.coerce_to('string') }
|
||||
- cd: mid = r.range(2,9).coerce_to('array').add(r.range(20,90).coerce_to('array')).map(r.row.coerce_to('string'))
|
||||
rb: mid = r.range(2,9).coerce_to('array').add(r.range(20,90).coerce_to('array')).map{ |row| row.coerce_to('string') }
|
||||
- cd: post = r.range(3,10).coerce_to('array').add(r.range(30,100).coerce_to('array')).map(r.row.coerce_to('string'))
|
||||
rb: post = r.range(3,10).coerce_to('array').add(r.range(30,100).coerce_to('array')).map{ |row| row.coerce_to('string') }
|
||||
|
||||
- cd: erroredres = r.range(2).coerce_to('array').add(r.range(10, 20).coerce_to('array')).append(100).map(r.row.coerce_to('string'))
|
||||
rb: erroredres = r.range(2).coerce_to('array').add(r.range(10, 20).coerce_to('array')).append(100).map{ |val| val.coerce_to('string') }
|
||||
|
||||
# Start overlapping changefeeds
|
||||
- js: pre_changes = tbl.between(r.minval, commonPrefix.append('7'), {index:'sindex'}).changes({squash:false}).limit(pre.length)('new_val')('value')
|
||||
py: pre_changes = tbl.between(r.minval, common_prefix.append('7'), index='sindex').changes(squash=False).limit(len(pre))['new_val']['value']
|
||||
rb: pre_changes = tbl.between(r.minval, common_prefix.append('7'), index:'sindex').changes(squash:false).limit(pre.length)['new_val']['value']
|
||||
- js: mid_changes = tbl.between(commonPrefix.append('2'), common_prefix.append('9'), {index:'sindex'}).changes({squash:false}).limit(post.length)('new_val')('value')
|
||||
py: mid_changes = tbl.between(common_prefix.append('2'), common_prefix.append('9'), index='sindex').changes(squash=False).limit(len(post))['new_val']['value']
|
||||
rb: mid_changes = tbl.between(common_prefix.append('2'), common_prefix.append('9'), index:'sindex').changes(squash:false).limit(post.length)['new_val']['value']
|
||||
- js: post_changes = tbl.between(commonPrefix.append('3'), r.maxval, {index:'sindex'}).changes({squash:false}).limit(mid.length)('new_val')('value')
|
||||
py: post_changes = tbl.between(common_prefix.append('3'), r.maxval, index='sindex').changes(squash=False).limit(len(mid))['new_val']['value']
|
||||
rb: post_changes = tbl.between(common_prefix.append('3'), r.maxval, index:'sindex').changes(squash:false).limit(mid.length)['new_val']['value']
|
||||
|
||||
# Start changefeeds with non-existence errors
|
||||
|
||||
- js: premap_changes1 = tbl.map(r.branch(r.row('value').lt('2'), r.row, r.row("dummy"))).changes({squash:false}).limit(erroredres.length)('new_val')('value')
|
||||
py: premap_changes1 = tbl.map(r.branch(r.row['value'].lt('2'), r.row, r.row["dummy"])).changes(squash=False).limit(len(erroredres))['new_val']['value']
|
||||
rb: premap_changes1 = tbl.map{ |row| r.branch(row['value'].lt('2'), row, row["dummy"]) }.changes(squash:false).limit(erroredres.length)['new_val']['value']
|
||||
|
||||
- js: postmap_changes1 = tbl.changes({squash:false}).map(r.branch(r.row('new_val')('value').lt('2'), r.row, r.row("dummy"))).limit(erroredres.length)('new_val')('value')
|
||||
py: postmap_changes1 = tbl.changes(squash=False).map(r.branch(r.row['new_val']['value'].lt('2'), r.row, r.row["dummy"])).limit(len(erroredres))['new_val']['value']
|
||||
rb: postmap_changes1 = tbl.changes(squash:false).map{ |row| r.branch(row['new_val']['value'].lt('2'), row, row["dummy"]) }.limit(erroredres.length)['new_val']['value']
|
||||
|
||||
- js: prefilter_changes1 = tbl.filter(r.branch(r.row('value').lt('2'), true, r.row("dummy"))).changes({squash:false}).limit(erroredres.length)('new_val')('value')
|
||||
py: prefilter_changes1 = tbl.filter(r.branch(r.row['value'].lt('2'), True, r.row["dummy"])).changes(squash=False).limit(len(erroredres))['new_val']['value']
|
||||
rb: prefilter_changes1 = tbl.filter{ |row| r.branch(row['value'].lt('2'), true, row["dummy"]) }.changes(squash:false).limit(erroredres.length)['new_val']['value']
|
||||
|
||||
- js: postfilter_changes1 = tbl.changes({squash:false}).filter(r.branch(r.row('new'+'_'+'val')('value').lt('2'), true, r.row("dummy"))).limit(erroredres.length)('new_val')('value')
|
||||
py: postfilter_changes1 = tbl.changes(squash=False).filter(r.branch(r.row['new_val']['value'].lt('2'), True, r.row["dummy"])).limit(len(erroredres))['new_val']['value']
|
||||
rb: postfilter_changes1 = tbl.changes(squash:false).filter{ |row| r.branch(row['new_val']['value'].lt('2'), true, row["dummy"]) }.limit(erroredres.length)['new_val']['value']
|
||||
|
||||
# Start changefeeds with runtime errors
|
||||
|
||||
- js: premap_changes2 = tbl.map(r.branch(r.row('value').lt('2'), r.row, r.expr([]).nth(1))).changes({squash:false}).limit(erroredres.length)('new_val')('value')
|
||||
py: premap_changes2 = tbl.map(r.branch(r.row['value'].lt('2'), r.row, r.expr([])[1])).changes(squash=False).limit(len(erroredres))['new_val']['value']
|
||||
rb: premap_changes2 = tbl.map{ |row| r.branch(row['value'].lt('2'), row, r.expr([])[1]) }.changes(squash:false).limit(erroredres.length)['new_val']['value']
|
||||
|
||||
- js: postmap_changes2 = tbl.changes({squash:false}).map(r.branch(r.row('new'+'_'+'val')('value').lt('2'), r.row, r.expr([]).nth(1))).limit(erroredres.length)('new_val')('value')
|
||||
py: postmap_changes2 = tbl.changes(squash=False).map(r.branch(r.row['new_val']['value'].lt('2'), r.row, r.expr([])[1])).limit(len(erroredres))['new_val']['value']
|
||||
rb: postmap_changes2 = tbl.changes(squash:false).map{ |row| r.branch(row['new_val']['value'].lt('2'), row, r.expr([])[1]) }.limit(erroredres.length)['new_val']['value']
|
||||
|
||||
- js: prefilter_changes2 = tbl.filter(r.branch(r.row('value').lt('2'), true, r.expr([]).nth(1))).changes({squash:false}).limit(erroredres.length)('new_val')('value')
|
||||
py: prefilter_changes2 = tbl.filter(r.branch(r.row['value'].lt('2'), True, r.expr([])[1])).changes(squash=False).limit(len(erroredres))['new_val']['value']
|
||||
rb: prefilter_changes2 = tbl.filter{ |row| r.branch(row['value'].lt('2'), true, r.expr([])[1]) }.changes(squash:false).limit(erroredres.length)['new_val']['value']
|
||||
|
||||
- js: postfilter_changes2 = tbl.changes({squash:false}).filter(r.branch(r.row('new'+'_'+'val')('value').lt('2'), true, r.expr([]).nth(1))).limit(erroredres.length)('new_val')('value')
|
||||
py: postfilter_changes2 = tbl.changes(squash=False).filter(r.branch(r.row['new_val']['value'].lt('2'), True, r.expr([])[1])).limit(len(erroredres))['new_val']['value']
|
||||
rb: postfilter_changes2 = tbl.changes(squash:false).filter{ |row| r.branch(row['new_val']['value'].lt('2'), true, r.expr([])[1]) }.limit(erroredres.length)['new_val']['value']
|
||||
|
||||
# Start non-deterministic changefeeds - there is a very small chance of these hanging if not enough results come through
|
||||
- def:
|
||||
py: nondetermmap = r.branch(r.random().gt(0.5), r.row, r.error("dummy"))
|
||||
js: nondetermmap = function (row) { return r.branch(r.random().gt(0.5), row, r.error("dummy")); }
|
||||
rb: nondetermmap = Proc.new { |row| r.branch(r.random().gt(0.5), row, r.error("dummy")) }
|
||||
- def:
|
||||
py: nondetermfilter = lambda row:r.random().gt(0.5)
|
||||
js: nondetermfilter = function (row) { return r.random().gt(0.5); }
|
||||
rb: nondetermfilter = Proc.new { |row| r.random().gt(0.5) }
|
||||
|
||||
- rb: tbl.map(nondetermmap).changes(squash:false)
|
||||
js: tbl.map(nondetermmap).changes({squash:false})
|
||||
py: tbl.map(nondetermmap).changes(squash=False)
|
||||
ot: err('ReqlQueryLogicError', 'Cannot call `changes` after a non-deterministic function.')
|
||||
|
||||
- rb: postmap_changes3 = tbl.changes(squash:false).map(nondetermmap).limit(100)
|
||||
js: postmap_changes3 = tbl.changes({squash:false}).map(nondetermmap).limit(100)
|
||||
py: postmap_changes3 = tbl.changes(squash=False).map(nondetermmap).limit(100)
|
||||
|
||||
- rb: tbl.filter(nondetermfilter).changes(squash:false)
|
||||
js: tbl.filter(nondetermfilter).changes({squash:false})
|
||||
py: tbl.filter(nondetermfilter).changes(squash=False)
|
||||
ot: err('ReqlQueryLogicError', 'Cannot call `changes` after a non-deterministic function.')
|
||||
|
||||
- rb: postfilter_changes3 = tbl.changes(squash:false).filter(nondetermfilter).limit(4)
|
||||
js: postfilter_changes3 = tbl.changes({squash:false}).filter(nondetermfilter).limit(4)
|
||||
py: postfilter_changes3 = tbl.changes(squash=False).filter(nondetermfilter).limit(4)
|
||||
|
||||
# Insert several rows that will and will not be returned
|
||||
- cd: tbl.insert(r.range(101).map({'id':r.uuid().coerce_to('binary').slice(0,r.random(4,24)).coerce_to('string'),'value':r.row.coerce_to('string')}))
|
||||
rb: tbl.insert(r.range(101).map{ |row| {'id'=>r.uuid().coerce_to('binary').slice(0,r.random(4,24)).coerce_to('string'),'value'=>row.coerce_to('string')}})
|
||||
ot: ({'skipped':0,'deleted':0,'unchanged':0,'errors':0,'replaced':0,'inserted':101})
|
||||
|
||||
# Check that our limited watchers have been satisfied
|
||||
- cd: pre_changes
|
||||
ot: bag(pre)
|
||||
|
||||
- cd: mid_changes
|
||||
ot: bag(mid)
|
||||
|
||||
- cd: post_changes
|
||||
ot: bag(post)
|
||||
|
||||
- cd: premap_changes1
|
||||
ot: bag(erroredres)
|
||||
|
||||
- cd: premap_changes2
|
||||
ot: bag(erroredres)
|
||||
|
||||
- cd: postmap_changes1
|
||||
ot: err('ReqlNonExistenceError', "No attribute `dummy` in object:")
|
||||
|
||||
- cd: postmap_changes2
|
||||
ot: err('ReqlNonExistenceError', "Index out of bounds:" + " 1")
|
||||
|
||||
- cd: postmap_changes3
|
||||
ot: err('ReqlUserError', "dummy")
|
||||
|
||||
- cd: prefilter_changes1
|
||||
ot: bag(erroredres)
|
||||
|
||||
- cd: prefilter_changes2
|
||||
ot: bag(erroredres)
|
||||
|
||||
- cd: postfilter_changes1
|
||||
ot: bag(erroredres)
|
||||
|
||||
- cd: postfilter_changes2
|
||||
ot: bag(erroredres)
|
||||
|
||||
- cd: postfilter_changes3
ot: arrlen(4)
|
@ -1,27 +0,0 @@
desc: Geo indexed changefeed operations
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
- rb: tbl.index_create('L', {geo: true})
|
||||
ot: partial({'created': 1})
|
||||
|
||||
- rb: tbl.index_wait().count
|
||||
ot: 1
|
||||
|
||||
- def: obj11 = {id: "11", L: r.point(1,1)}
|
||||
- def: obj12 = {id: "12", L: r.point(1,2)}
|
||||
- def: obj21 = {id: "21", L: r.point(2,1)}
|
||||
- def: obj22 = {id: "22", L: r.point(2,2)}
|
||||
|
||||
# A distance of 130,000 meters from 1,1 is enough to cover 1,2 and 2,1 (~110 km
# distance) but not 2,2 (~150 km distance).
|
||||
#
|
||||
# This is useful because the S2LatLngRect bounding box passed to the shards contains
|
||||
# 2,2 yet it should not be returned in the changefeed results.
|
||||
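# Rough check of those figures (hedged flat-earth approximation in plain
# Python; one degree is ~111 km near the equator, where these points sit):
#   import math
#   deg_km = 111.0
#   print(deg_km)                 # 1,1 -> 1,2 or 2,1: ~111 km
#   print(deg_km * math.sqrt(2))  # 1,1 -> 2,2: ~157 km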
- rb: feed = tbl.get_intersecting(r.circle(r.point(1,1), 130000), {index: "L"}).get_field("id").changes(include_initial: true)
|
||||
|
||||
- rb: tbl.insert([obj11, obj12, obj21, obj22])
|
||||
ot: partial({'errors': 0, 'inserted': 4})
|
||||
|
||||
- rb: fetch(feed, 3)
|
||||
ot: bag([{"new_val" => "11", "old_val" => nil}, {"new_val" => "12", "old_val" => nil}, {"new_val" => "21", "old_val" => nil}])
|
||||
|
@ -1,38 +0,0 @@
desc: Test duplicate indexes with squashing
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
- cd: tbl.index_create('a')
|
||||
ot: partial({'created':1})
|
||||
- cd: tbl.index_wait('a')
|
||||
|
||||
- py: feed = tbl.order_by(index='a').limit(10).changes(squash=2)
|
||||
rb: feed = tbl.orderby(index:'a').limit(10).changes(squash:2).limit(9)
|
||||
js: feed = tbl.orderBy({index:'a'}).limit(10).changes({squash:2}).limit(9)
|
||||
runopts:
|
||||
# limit the number of pre-fetched rows
|
||||
max_batch_rows: 1
|
||||
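# max_batch_rows caps how many rows the server returns per batch; from the
# Python driver it is passed as a run-time optarg (hedged sketch):
#   r.table('tbl').order_by(index='a').limit(10).changes(squash=2).run(conn, max_batch_rows=1)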
|
||||
- py: tbl.insert(r.range(0, 12).map({'id':r.row, 'a':5}))
|
||||
rb: tbl.insert(r.range(0, 12).map{|row| {'id':row, 'a':5}})
|
||||
js: tbl.insert(r.range(0, 12).map(function(row){ return {'id':row, 'a':5}; }))
|
||||
ot: partial({'inserted':12, 'errors':0})
|
||||
|
||||
- py: tbl.get_all(1, 8, 9, index='id').delete()
|
||||
rb: tbl.get_all(1, 8, 9, index:'id').delete()
|
||||
js: tbl.get_all(1, 8, 9, {index:'id'}).delete()
|
||||
ot: partial({'deleted':3, 'errors':0})
|
||||
|
||||
# should be replaced with a noreplyWait
|
||||
- cd: wait(2)
|
||||
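# noreplyWait blocks until all outstanding noreply writes have been applied;
# hedged Python sketch of what this placeholder wait would become:
#   conn.noreply_wait()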
|
||||
- cd: fetch(feed)
|
||||
ot: bag([
|
||||
{"new_val":{"a":5, "id":0}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":2}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":3}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":4}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":5}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":6}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":7}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":10}, "old_val":nil},
|
||||
{"new_val":{"a":5, "id":11}, "old_val":nil}])
|
@ -1,58 +0,0 @@
desc: Test `include_states`
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
- py: tbl.changes(squash=true, include_states=true).limit(1)
|
||||
rb: tbl.changes(squash:true, include_states:true).limit(1)
|
||||
js: tbl.changes({squash:true, includeStates:true}).limit(1)
|
||||
ot: [{'state':'ready'}]
|
||||
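# A feed opened with include_states interleaves status documents such as
# {'state':'initializing'} and {'state':'ready'} with the actual changes.
# Hedged Python sketch of skipping them client-side (handle() is a
# placeholder, not part of the driver):
#   feed = r.table('tbl').changes(include_states=True).run(conn)
#   for doc in feed:
#       if 'state' in doc:
#           continue  # status document, not a change
#       handle(doc['new_val'])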
|
||||
- py: tbl.get(0).changes(squash=true, include_states=true, include_initial=true).limit(3)
|
||||
rb: tbl.get(0).changes(squash:true, include_states:true, include_initial:true).limit(3)
|
||||
js: tbl.get(0).changes({squash:true, includeStates:true, includeInitial:true}).limit(3)
|
||||
ot: [{'state':'initializing'}, {'new_val':null}, {'state':'ready'}]
|
||||
|
||||
- py: tbl.order_by(index='id').limit(10).changes(squash=true, include_states=true, include_initial=true).limit(2)
|
||||
rb: tbl.order_by(index:'id').limit(10).changes(squash:true, include_states:true, include_initial:true).limit(2)
|
||||
js: tbl.orderBy({index:'id'}).limit(10).changes({squash:true, includeStates:true, includeInitial:true}).limit(2)
|
||||
ot: [{'state':'initializing'}, {'state':'ready'}]
|
||||
|
||||
- cd: tbl.insert({'id':1})
|
||||
|
||||
- py: tbl.order_by(index='id').limit(10).changes(squash=true, include_states=true, include_initial=true).limit(3)
|
||||
rb: tbl.order_by(index:'id').limit(10).changes(squash:true, include_states:true, include_initial:true).limit(3)
|
||||
js: tbl.orderBy({index:'id'}).limit(10).changes({squash:true, includeStates:true, includeInitial:true}).limit(3)
|
||||
ot: [{'state':'initializing'}, {'new_val':{'id':1}}, {'state':'ready'}]
|
||||
|
||||
- py: tblchanges = tbl.changes(squash=true, include_states=true)
|
||||
rb: tblchanges = tbl.changes(squash:true, include_states:true)
|
||||
js: tblchanges = tbl.changes({squash:true, includeStates:true})
|
||||
|
||||
- cd: tbl.insert({'id':2})
|
||||
|
||||
- cd: fetch(tblchanges, 2)
|
||||
ot: [{'state':'ready'},{'new_val':{'id':2},'old_val':null}]
|
||||
|
||||
- py: getchanges = tbl.get(2).changes(include_states=true, include_initial=true)
|
||||
rb: getchanges = tbl.get(2).changes(include_states:true, include_initial:true)
|
||||
js: getchanges = tbl.get(2).changes({includeStates:true, includeInitial:true})
|
||||
|
||||
- cd: tbl.get(2).update({'a':1})
|
||||
|
||||
- cd: fetch(getchanges, 4)
|
||||
ot: [{'state':'initializing'}, {'new_val':{'id':2}}, {'state':'ready'}, {'old_val':{'id':2},'new_val':{'id':2,'a':1}}]
|
||||
|
||||
- py: limitchanges = tbl.order_by(index='id').limit(10).changes(include_states=true, include_initial=true)
|
||||
rb: limitchanges = tbl.order_by(index:'id').limit(10).changes(include_states:true, include_initial:true)
|
||||
js: limitchanges = tbl.orderBy({index:'id'}).limit(10).changes({includeStates:true, includeInitial:true})
|
||||
|
||||
- py: limitchangesdesc = tbl.order_by(index=r.desc('id')).limit(10).changes(include_states=true, include_initial=true)
|
||||
rb: limitchangesdesc = tbl.order_by(index:r.desc('id')).limit(10).changes(include_states:true, include_initial:true)
|
||||
js: limitchangesdesc = tbl.orderBy({index:r.desc('id')}).limit(10).changes({includeStates:true, includeInitial:true})
|
||||
|
||||
- cd: tbl.insert({'id':3})
|
||||
|
||||
- cd: fetch(limitchanges, 5)
|
||||
ot: [{'state':'initializing'}, {'new_val':{'id':1}}, {'new_val':{'a':1, 'id':2}}, {'state':'ready'}, {'old_val':null, 'new_val':{'id':3}}]
|
||||
|
||||
- cd: fetch(limitchangesdesc, 5)
|
||||
ot: [{'state':'initializing'}, {'new_val':{'a':1, 'id':2}}, {'new_val':{'id':1}}, {'state':'ready'}, {'old_val':null, 'new_val':{'id':3}}]
|
@ -1,147 +0,0 @@
desc: Test point changefeed basics
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# -- basic
|
||||
|
||||
# start a feed
|
||||
|
||||
- cd: basic = tbl.get(1).changes({include_initial:true})
|
||||
py: basic = tbl.get(1).changes(include_initial=True)
|
||||
|
||||
# - initial return
|
||||
|
||||
- cd: fetch(basic, 1)
|
||||
ot: [{'new_val':null}]
|
||||
|
||||
# - inserts
|
||||
|
||||
- cd: tbl.insert({'id':1})
|
||||
ot: partial({'errors':0, 'inserted':1})
|
||||
|
||||
- cd: fetch(basic, 1)
|
||||
ot: [{'old_val':null, 'new_val':{'id':1}}]
|
||||
|
||||
# - updates
|
||||
|
||||
- cd: tbl.get(1).update({'update':1})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
|
||||
- cd: fetch(basic, 1)
|
||||
ot: [{'old_val':{'id':1}, 'new_val':{'id':1,'update':1}}]
|
||||
|
||||
# - deletions
|
||||
|
||||
- cd: tbl.get(1).delete()
|
||||
ot: partial({'errors':0, 'deleted':1})
|
||||
|
||||
- cd: fetch(basic, 1)
|
||||
ot: [{'old_val':{'id':1,'update':1}, 'new_val':null}]
|
||||
|
||||
# - closing
|
||||
|
||||
- cd: basic.close()
|
||||
rb: def pass; end
|
||||
# the ruby test driver currently has to mangle cursors, so we can't close them properly
|
||||
|
||||
# -- filter
|
||||
|
||||
- py: filter = tbl.get(1).changes(squash=false,include_initial=True).filter(r.row['new_val']['update'].gt(2))['new_val']['update']
|
||||
rb: filter = tbl.get(1).changes(squash:false,include_initial:true).filter{|row| row['new_val']['update'].gt(2)}['new_val']['update']
|
||||
js: filter = tbl.get(1).changes({squash:false,include_initial:true}).filter(r.row('new_val')('update').gt(2))('new_val')('update')
|
||||
|
||||
- cd: tbl.insert({'id':1, 'update':1})
|
||||
- cd: tbl.get(1).update({'update':4})
|
||||
- cd: tbl.get(1).update({'update':1})
|
||||
- cd: tbl.get(1).update({'update':7})
|
||||
|
||||
- cd: fetch(filter, 2)
|
||||
ot: [4,7]
|
||||
|
||||
# -- pluck on values
|
||||
|
||||
- py: pluck = tbl.get(3).changes(squash=false,include_initial=True).pluck({'new_val':['red', 'blue']})['new_val']
|
||||
rb: pluck = tbl.get(3).changes(squash:false,include_initial:true).pluck({'new_val':['red', 'blue']})['new_val']
|
||||
js: pluck = tbl.get(3).changes({squash:false,include_initial:true}).pluck({'new_val':['red', 'blue']})('new_val')
|
||||
|
||||
- cd: tbl.insert({'id':3, 'red':1, 'green':1})
|
||||
ot: partial({'errors':0, 'inserted':1})
|
||||
- cd: tbl.get(3).update({'blue':2, 'green':3})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
- cd: tbl.get(3).update({'green':4})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
- cd: tbl.get(3).update({'blue':4})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
|
||||
- cd: fetch(pluck, 4)
|
||||
ot: [{'red': 1}, {'blue': 2, 'red': 1}, {'blue': 2, 'red': 1}, {'blue': 4, 'red': 1}]
|
||||
|
||||
# -- virtual tables
|
||||
|
||||
# - rethinkdb._debug_scratch
|
||||
|
||||
- def: dtbl = r.db('rethinkdb').table('_debug_scratch')
|
||||
|
||||
- cd: debug = dtbl.get(1).changes({include_initial:true})
|
||||
py: debug = dtbl.get(1).changes(include_initial=True)
|
||||
|
||||
- cd: fetch(debug, 1)
|
||||
ot: [{'new_val':null}]
|
||||
|
||||
- cd: dtbl.insert({'id':1})
|
||||
ot: partial({'errors':0, 'inserted':1})
|
||||
- cd: fetch(debug, 1)
|
||||
ot: [{'old_val':null, 'new_val':{'id':1}}]
|
||||
|
||||
- cd: dtbl.get(1).update({'update':1})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
- cd: fetch(debug, 1)
|
||||
ot: [{'old_val':{'id':1}, 'new_val':{'id':1,'update':1}}]
|
||||
|
||||
- cd: dtbl.get(1).delete()
|
||||
ot: partial({'errors':0, 'deleted':1})
|
||||
- cd: fetch(debug, 1)
|
||||
ot: [{'old_val':{'id':1,'update':1}, 'new_val':null}]
|
||||
|
||||
- cd: dtbl.insert({'id':5, 'red':1, 'green':1})
|
||||
ot: {'skipped':0, 'deleted':0, 'unchanged':0, 'errors':0, 'replaced':0, 'inserted':1}
|
||||
- py: dtblPluck = dtbl.get(5).changes(include_initial=True).pluck({'new_val':['red', 'blue']})['new_val']
|
||||
rb: dtblPluck = dtbl.get(5).changes(include_initial:true).pluck({'new_val':['red', 'blue']})['new_val']
|
||||
js: dtblPluck = dtbl.get(5).changes({include_initial:true}).pluck({'new_val':['red', 'blue']})('new_val')
|
||||
|
||||
# disabled because the initial value is not being reported correctly, so it goes missing; see #3723
|
||||
- cd: fetch(dtblPluck, 1)
|
||||
ot: [{'red':1}]
|
||||
|
||||
- cd: dtbl.get(5).update({'blue':2, 'green':3})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
|
||||
- cd: fetch(dtblPluck, 1)
|
||||
ot: [{'blue':2, 'red':1}]
|
||||
|
||||
# - rethinkdb.table_status bad optargs
|
||||
|
||||
# disabled, re-enable once #3725 is done
|
||||
# - py: r.db('rethinkdb').table('table_status').changes(squash=False)
|
||||
# rb: r.db('rethinkdb').table('table_status').changes(squash:False)
|
||||
# js: r.db('rethinkdb').table('table_status').changes({squash:False})
|
||||
# ot: err('ReqlRuntimeError', 'replace with error message decided in \#3725')
|
||||
|
||||
# - rethinkdb.table_status
|
||||
|
||||
- cd: tableId = tbl.info()['id']
|
||||
js: tableId = tbl.info()('id')
|
||||
|
||||
- cd: rtblPluck = r.db('rethinkdb').table('table_status').get(tableId).changes({include_initial:true})
|
||||
py: rtblPluck = r.db('rethinkdb').table('table_status').get(tableId).changes(include_initial=True)
|
||||
- cd: fetch(rtblPluck, 1)
|
||||
ot: partial([{'new_val':partial({'db':'test'})}])
|
||||
|
||||
- py: tbl.reconfigure(shards=3, replicas=1)
|
||||
rb: tbl.reconfigure(shards:3, replicas:1)
|
||||
js: tbl.reconfigure({shards:3, replicas:1})
|
||||
- py: fetch(rtblPluck, 1, 2)
|
||||
js: fetch(rtblPluck, 1, 2)
|
||||
rb: fetch(rtblPluck, 1)
|
||||
ot: partial([{'old_val':partial({'db':'test'}), 'new_val':partial({'db':'test'})}])
|
||||
|
@ -1,50 +0,0 @@
desc: Test basic changefeed operations
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# Fill in some data
|
||||
- rb: tbl.index_create('a')
|
||||
ot: partial({'created':1})
|
||||
|
||||
- rb: tbl.index_wait().count
|
||||
ot: 1
|
||||
|
||||
- rb: tbl.insert([{id:1, a:8}, {id:2, a:7}])
|
||||
ot: partial({'errors':0, 'inserted':2})
|
||||
|
||||
- rb: idmin = tbl.min(index:'id').changes(squash:false, include_initial:true).limit(2)
|
||||
- rb: idmax = tbl.max(index:'id').changes(squash:false, include_initial:true).limit(2)
|
||||
- rb: amin = tbl.min(index:'a').changes(squash:false, include_initial:true).limit(2)
|
||||
- rb: amax = tbl.max(index:'a').changes(squash:false, include_initial:true).limit(2)
|
||||
|
||||
- rb: idmin2 = tbl.min(index:'id').changes(squash:true, include_initial:true).limit(2)
|
||||
- rb: idmax2 = tbl.max(index:'id').changes(squash:true, include_initial:true).limit(2)
|
||||
- rb: amin2 = tbl.min(index:'a').changes(squash:true, include_initial:true).limit(2)
|
||||
- rb: amax2 = tbl.max(index:'a').changes(squash:true, include_initial:true).limit(2)
|
||||
|
||||
- rb: tbl.insert([{id:0, a:9}, {id:3, a:6}])
|
||||
ot: partial({'errors':0, 'inserted':2})
|
||||
|
||||
- rb: idmin.to_a
|
||||
ot: ([{"new_val"=>{"a"=>8, "id"=>1}}, {"new_val"=>{"a"=>9, "id"=>0}, "old_val"=>{"a"=>8, "id"=>1}}])
|
||||
|
||||
- rb: idmax.to_a
|
||||
ot: ([{"new_val"=>{"a"=>7, "id"=>2}}, {"new_val"=>{"a"=>6, "id"=>3}, "old_val"=>{"a"=>7, "id"=>2}}])
|
||||
|
||||
- rb: amin.to_a
|
||||
ot: ([{"new_val"=>{"a"=>7, "id"=>2}}, {"new_val"=>{"a"=>6, "id"=>3}, "old_val"=>{"a"=>7, "id"=>2}}])
|
||||
|
||||
- rb: amax.to_a
|
||||
ot: ([{"new_val"=>{"a"=>8, "id"=>1}}, {"new_val"=>{"a"=>9, "id"=>0}, "old_val"=>{"a"=>8, "id"=>1}}])
|
||||
|
||||
- rb: idmin2.to_a
|
||||
ot: ([{"new_val"=>{"a"=>8, "id"=>1}}, {"new_val"=>{"a"=>9, "id"=>0}, "old_val"=>{"a"=>8, "id"=>1}}])
|
||||
|
||||
- rb: idmax2.to_a
|
||||
ot: ([{"new_val"=>{"a"=>7, "id"=>2}}, {"new_val"=>{"a"=>6, "id"=>3}, "old_val"=>{"a"=>7, "id"=>2}}])
|
||||
|
||||
- rb: amin2.to_a
|
||||
ot: ([{"new_val"=>{"a"=>7, "id"=>2}}, {"new_val"=>{"a"=>6, "id"=>3}, "old_val"=>{"a"=>7, "id"=>2}}])
|
||||
|
||||
- rb: amax2.to_a
|
||||
ot: ([{"new_val"=>{"a"=>8, "id"=>1}}, {"new_val"=>{"a"=>9, "id"=>0}, "old_val"=>{"a"=>8, "id"=>1}}])
|
@ -1,62 +0,0 @@
desc: Test changefeed squashing
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# Check type
|
||||
|
||||
- py: tbl.changes(squash=true).type_of()
|
||||
rb: tbl.changes(squash:true).type_of()
|
||||
js: tbl.changes({squash:true}).typeOf()
|
||||
ot: ("STREAM")
|
||||
|
||||
# comparison changes
|
||||
|
||||
- cd: normal_changes = tbl.changes().limit(2)
|
||||
|
||||
- py: false_squash_changes = tbl.changes(squash=False).limit(2)
|
||||
js: false_squash_changes = tbl.changes({squash:false}).limit(2)
|
||||
rb: false_squash_changes = tbl.changes(squash:false).limit(2)
|
||||
|
||||
- py: long_squash_changes = tbl.changes(squash=0.5).limit(1)
|
||||
js: long_squash_changes = tbl.changes({squash:0.5}).limit(1)
|
||||
rb: long_squash_changes = tbl.changes(squash:0.5).limit(1)
|
||||
|
||||
- py: squash_changes = tbl.changes(squash=true).limit(1)
|
||||
js: squash_changes = tbl.changes({squash:true}).limit(1)
|
||||
rb: squash_changes = tbl.changes(squash:true).limit(1)
|
||||
|
||||
- cd: tbl.insert({'id':100})['inserted']
|
||||
js: tbl.insert({'id':100})('inserted')
|
||||
ot: 1
|
||||
|
||||
- cd: tbl.get(100).update({'a':1})['replaced']
|
||||
js: tbl.get(100).update({'a':1})('replaced')
|
||||
ot: 1
|
||||
|
||||
- cd: normal_changes
|
||||
ot: ([{'new_val':{'id':100}, 'old_val':null},
|
||||
{'new_val':{'a':1, 'id':100}, 'old_val':{'id':100}}])
|
||||
|
||||
- cd: false_squash_changes
|
||||
ot: ([{'new_val':{'id':100}, 'old_val':null},
|
||||
{'new_val':{'a':1, 'id':100}, 'old_val':{'id':100}}])
|
||||
|
||||
- cd: long_squash_changes
|
||||
ot: ([{'new_val':{'a':1, 'id':100}, 'old_val':null}])
|
||||
|
||||
- cd: squash_changes
|
||||
ot:
|
||||
js: ([{'new_val':{'a':1, 'id':100}, 'old_val':null}])
|
||||
cd: ([{'new_val':{'id':100}, 'old_val':null}])
|
||||
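# squash:true coalesces pending changes as much as possible, while a numeric
# squash:N coalesces changes arriving within an N-second window; that is why
# the insert and the update above collapse into one change for
# long_squash_changes. Hedged Python sketch of the same option:
#   feed = r.table('tbl').changes(squash=0.5).run(conn)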
|
||||
# Bad squash values
|
||||
|
||||
- py: tbl.changes(squash=null)
|
||||
rb: tbl.changes(squash:null)
|
||||
js: tbl.changes({squash:null})
|
||||
ot: err('ReqlQueryLogicError', 'Expected BOOL or NUMBER but found NULL.')
|
||||
|
||||
- py: tbl.changes(squash=-10)
|
||||
rb: tbl.changes(squash:-10)
|
||||
js: tbl.changes({squash:-10})
|
||||
ot: err('ReqlQueryLogicError', 'Expected BOOL or a positive NUMBER but found a negative NUMBER.')
|
@ -1,101 +0,0 @@
desc: Test changefeeds on a table
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# ==== regular tables
|
||||
|
||||
# - start feeds
|
||||
|
||||
- cd: all = tbl.changes()
|
||||
|
||||
# - note: no initial values from table changefeeds
|
||||
|
||||
# - inserts
|
||||
|
||||
- cd: tbl.insert([{'id':1}, {'id':2}])
|
||||
ot: partial({'errors':0, 'inserted':2})
|
||||
- cd: fetch(all, 2)
|
||||
ot: bag([{'old_val':null, 'new_val':{'id':1}}, {'old_val':null, 'new_val':{'id':2}}])
|
||||
|
||||
# - updates
|
||||
|
||||
- cd: tbl.get(1).update({'version':1})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
- cd: fetch(all, 1)
|
||||
ot: [{'old_val':{'id':1}, 'new_val':{'id':1, 'version':1}}]
|
||||
|
||||
# - deletions
|
||||
|
||||
- cd: tbl.get(1).delete()
|
||||
ot: partial({'errors':0, 'deleted':1})
|
||||
- cd: fetch(all, 1)
|
||||
ot: [{'old_val':{'id':1, 'version':1}, 'new_val':null}]
|
||||
|
||||
# - pluck on values
|
||||
|
||||
- cd: pluck = tbl.changes().pluck({'new_val':['version']})
|
||||
- cd: tbl.insert([{'id':5, 'version':5}])
|
||||
ot: partial({'errors':0, 'inserted':1})
|
||||
- cd: fetch(pluck, 1)
|
||||
ot: [{'new_val':{'version':5}}]
|
||||
|
||||
# - order by
|
||||
|
||||
- cd: tbl.changes().order_by('id')
|
||||
ot: err('ReqlQueryLogicError', "Cannot call a terminal (`reduce`, `count`, etc.) on an infinite stream (such as a changefeed).")
|
||||
#
|
||||
# TODO: enable this when #4067 is done
|
||||
#
|
||||
# - js: orderedLimit = tbl.changes().limit(5).order_by(r.desc('id'))('new_val')('id')
|
||||
# cd: orderedLimit = tbl.changes().limit(5).order_by(r.desc('id'))['new_val']['id']
|
||||
# - js: tbl.range(100, 105).map(function (row) { return {'id':row} })
|
||||
# py: tbl.range(100, 105).map({'id':r.row})
|
||||
# rb: tbl.range(100, 105).map{|row| {'id':row}}
|
||||
# - cd: fetch(orderedLimit)
|
||||
# ot: [104, 103, 102, 101, 100]
|
||||
|
||||
# - changes overflow
|
||||
|
||||
- cd: overflow = tbl.changes()
|
||||
runopts:
|
||||
changefeed_queue_size: 100
|
||||
# add enough entries to make sure we get the overflow error
|
||||
- js: tbl.insert(r.range(200).map(function(x) { return({}); }))
|
||||
py: tbl.insert(r.range(200).map(lambda x: {}))
|
||||
rb: tbl.insert(r.range(200).map{|x| {}})
|
||||
- cd: fetch(overflow, 90)
|
||||
ot: partial([{'error': regex('Changefeed cache over array size limit, skipped \d+ elements.')}])
|
||||
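# changefeed_queue_size bounds how many changes the server buffers for a
# feed before it starts dropping them and emits the overflow error matched
# above. Hedged Python sketch (optarg name assumed to match the runopt used
# here):
#   feed = r.table('tbl').changes(changefeed_queue_size=100).run(conn)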
|
||||
# ==== virtual tables
|
||||
|
||||
- def: vtbl = r.db('rethinkdb').table('_debug_scratch')
|
||||
- cd: allVirtual = vtbl.changes()
|
||||
|
||||
# - inserts
|
||||
|
||||
- cd: vtbl.insert([{'id':1}, {'id':2}])
|
||||
ot: partial({'errors':0, 'inserted':2})
|
||||
- cd: fetch(allVirtual, 2)
|
||||
ot: bag([{'old_val':null, 'new_val':{'id':1}}, {'old_val':null, 'new_val':{'id':2}}])
|
||||
|
||||
# - updates
|
||||
|
||||
- cd: vtbl.get(1).update({'version':1})
|
||||
ot: partial({'errors':0, 'replaced':1})
|
||||
- cd: fetch(allVirtual, 1)
|
||||
ot: [{'old_val':{'id':1}, 'new_val':{'id':1, 'version':1}}]
|
||||
|
||||
# - deletions
|
||||
|
||||
- cd: vtbl.get(1).delete()
|
||||
ot: partial({'errors':0, 'deleted':1})
|
||||
- cd: fetch(allVirtual, 1)
|
||||
ot: [{'old_val':{'id':1, 'version':1}, 'new_val':null}]
|
||||
|
||||
# - pluck on values
|
||||
|
||||
- cd: vpluck = vtbl.changes().pluck({'new_val':['version']})
|
||||
- cd: vtbl.insert([{'id':5, 'version':5}])
|
||||
ot: partial({'errors':0, 'inserted':1})
|
||||
- cd: fetch(vpluck, 1)
|
||||
ot: [{'new_val':{'version':5}}]
|
@ -1,297 +0,0 @@
desc: Tests RQL control flow structures
|
||||
table_variable_name: tbl, tbl2
|
||||
tests:
|
||||
|
||||
## FunCall
|
||||
|
||||
- py: r.expr(1).do(lambda v: v * 2)
|
||||
js: r.expr(1).do(function(v) { return v.mul(2); })
|
||||
rb: r.expr(1).do{|v| v * 2 }
|
||||
ot: 2
|
||||
|
||||
- py: r.expr([0, 1, 2]).do(lambda v: v.append(3))
|
||||
js: r([0, 1, 2]).do(function(v) { return v.append(3); })
|
||||
rb: r([0, 1, 2]).do{ |v| v.append(3) }
|
||||
ot: [0, 1, 2, 3]
|
||||
|
||||
- py: r.do(1, 2, lambda x, y: x + y)
|
||||
js: r.do(1, 2, function(x, y) { return x.add(y); })
|
||||
rb: r.do(1, 2) {|x, y| x + y}
|
||||
ot: 3
|
||||
|
||||
- py: r.do(lambda: 1)
|
||||
js: r.do(function() { return 1; })
|
||||
rb: r.do{1}
|
||||
ot: 1
|
||||
|
||||
# do error cases
|
||||
- py: r.do(1, 2, lambda x: x)
|
||||
js: r.do(1, 2, function(x) { return x; })
|
||||
rb: r.do(1, 2) {|x| x}
|
||||
ot: err("ReqlQueryLogicError", 'Expected function with 2 arguments but found function with 1 argument.', [1])
|
||||
|
||||
- py: r.do(1, 2, 3, lambda x, y: x + y)
|
||||
js: r.do(1, 2, 3, function(x, y) { return x.add(y); })
|
||||
rb: r.do(1, 2, 3) {|x, y| x + y}
|
||||
ot: err("ReqlQueryLogicError", 'Expected function with 3 arguments but found function with 2 arguments.', [1])
|
||||
|
||||
- cd: r.do(1)
|
||||
ot: 1
|
||||
|
||||
- js: r.do(1, function(x) {})
|
||||
ot: err("ReqlDriverCompileError", 'Anonymous function returned `undefined`. Did you forget a `return`?', [1])
|
||||
|
||||
- js: r.do(1, function(x) { return undefined; })
|
||||
ot: err("ReqlDriverCompileError", 'Anonymous function returned `undefined`. Did you forget a `return`?', [1])
|
||||
|
||||
- cd: r.do()
|
||||
ot:
|
||||
cd: err("ReqlCompileError", 'Expected 1 or more arguments but found 0.', [1])
|
||||
|
||||
# FunCall errors
|
||||
|
||||
- py: r.expr('abc').do(lambda v: v.append(3))
|
||||
js: r('abc').do(function(v) { return v.append(3); })
|
||||
rb: r('abc').do{ |v| v.append(3) }
|
||||
ot: err("ReqlQueryLogicError", "Expected type ARRAY but found STRING.", [1, 0])
|
||||
|
||||
- py: r.expr('abc').do(lambda v: v + 3)
|
||||
js: r('abc').do(function(v) { return v.add(3); })
|
||||
rb: r('abc').do{ |v| v + 3 }
|
||||
ot: err("ReqlQueryLogicError", "Expected type STRING but found NUMBER.", [1, 1])
|
||||
|
||||
- py: r.expr('abc').do(lambda v: v + 'def') + 3
|
||||
js: r('abc').do(function(v) { return v.add('def'); }).add(3)
|
||||
rb: r('abc').do{ |v| v + 'def' } + 3
|
||||
ot: err("ReqlQueryLogicError", "Expected type STRING but found NUMBER.", [1])
|
||||
|
||||
- py: r.expr(0).do(lambda a,b: a + b)
|
||||
js: r(0).do(function(a,b) { return a.add(b); })
|
||||
rb: r(0).do{ |a, b| a + b }
|
||||
ot: err("ReqlQueryLogicError", 'Expected function with 1 argument but found function with 2 arguments.', [1])
|
||||
|
||||
- py: r.do(1, 2, lambda a: a)
|
||||
js: r.do(1,2, function(a) { return a; })
|
||||
rb: r.do(1, 2) { |a| a }
|
||||
ot: err("ReqlQueryLogicError", 'Expected function with 2 arguments but found function with 1 argument.', [1])
|
||||
|
||||
- cd: r.expr(5).do(r.row)
|
||||
rb: r(5).do{ |row| row }
|
||||
ot: 5
|
||||
|
||||
## Branch
|
||||
|
||||
- cd: r.branch(True, 1, 2)
|
||||
ot: 1
|
||||
- cd: r.branch(False, 1, 2)
|
||||
ot: 2
|
||||
- cd: r.branch(1, 'c', False)
|
||||
ot: ("c")
|
||||
- cd: r.branch(null, {}, [])
|
||||
ot: ([])
|
||||
|
||||
- cd: r.branch(r.db('test'), 1, 2)
|
||||
ot: err("ReqlQueryLogicError", "Expected type DATUM but found DATABASE:", [])
|
||||
- cd: r.branch(tbl, 1, 2)
|
||||
ot: err("ReqlQueryLogicError", "Expected type DATUM but found TABLE:", [])
|
||||
- cd: r.branch(r.error("a"), 1, 2)
|
||||
ot: err("ReqlUserError", "a", [])
|
||||
|
||||
- cd: r.branch([], 1, 2)
|
||||
ot: 1
|
||||
- cd: r.branch({}, 1, 2)
|
||||
ot: 1
|
||||
- cd: r.branch("a", 1, 2)
|
||||
ot: 1
|
||||
- cd: r.branch(1.2, 1, 2)
|
||||
ot: 1
|
||||
|
||||
- cd: r.branch(True, 1, True, 2, 3)
|
||||
ot: 1
|
||||
- cd: r.branch(True, 1, False, 2, 3)
|
||||
ot: 1
|
||||
- cd: r.branch(False, 1, True, 2, 3)
|
||||
ot: 2
|
||||
- cd: r.branch(False, 1, False, 2, 3)
|
||||
ot: 3
|
||||
|
||||
- cd: r.branch(True, 1, True, 2)
|
||||
ot: err("ReqlQueryLogicError", "Cannot call `branch` term with an even number of arguments.")
|
||||
|
||||
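# With more than three arguments r.branch behaves like an if/elif/else
# chain: branch(test1, val1, test2, val2, ..., default). Hedged Python
# sketch:
#   r.branch(r.expr(7).gt(10), 'big',
#            r.expr(7).gt(5), 'medium',
#            'small').run(conn)  # -> 'medium'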
# r.error()
|
||||
- cd: r.error('Hello World')
|
||||
ot: err("ReqlUserError", "Hello World", [0])
|
||||
|
||||
- cd: r.error(5)
|
||||
# we might want to allow this eventually
|
||||
ot: err("ReqlQueryLogicError", "Expected type STRING but found NUMBER.", [0])
|
||||
|
||||
# r.filter
|
||||
- cd: r.expr([1, 2, 3]).filter()
|
||||
ot:
|
||||
cd: err("ReqlCompileError", "Expected 2 arguments but found 1.", [0])
|
||||
js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 0.", [0])
|
||||
- cd: r.expr([1, 2, 3]).filter(1, 2)
|
||||
ot:
|
||||
cd: err("ReqlCompileError", "Expected 2 arguments but found 3.", [0])
|
||||
js: err("ReqlCompileError", "Expected 1 argument (not including options) but found 2.", [0])
|
||||
|
||||
# r.js()
|
||||
- cd: r.js('1 + 1')
|
||||
ot: 2
|
||||
|
||||
- cd: r.js('1 + 1; 2 + 2')
|
||||
ot: 4
|
||||
|
||||
- cd: r.do(1, 2, r.js('(function(a, b) { return a + b; })'))
|
||||
ot: 3
|
||||
|
||||
- cd: r.expr(1).do(r.js('(function(x) { return x + 1; })'))
|
||||
ot: 2
|
||||
|
||||
- cd: r.expr('foo').do(r.js('(function(x) { return x + "bar"; })'))
|
||||
ot: 'foobar'
|
||||
|
||||
# js timeout optarg shouldn't be triggered
|
||||
- cd: r.js('1 + 2', {timeout:1.2})
|
||||
py: r.js('1 + 2', timeout=1.2)
|
||||
ot: 3
|
||||
|
||||
# js error cases
|
||||
- cd: r.js('(function() { return 1; })')
|
||||
ot: err("ReqlQueryLogicError", "Query result must be of type DATUM, GROUPED_DATA, or STREAM (got FUNCTION).", [0])
|
||||
|
||||
- cd: r.js('function() { return 1; }')
|
||||
ot: err("ReqlQueryLogicError", "SyntaxError: Unexpected token (", [0])
|
||||
|
||||
# Play with the number of arguments in the JS function
|
||||
- cd: r.do(1, 2, r.js('(function(a) { return a; })'))
|
||||
ot: 1
|
||||
|
||||
- cd: r.do(1, 2, r.js('(function(a, b, c) { return a; })'))
|
||||
ot: 1
|
||||
|
||||
- cd: r.do(1, 2, r.js('(function(a, b, c) { return c; })'))
|
||||
ot: err("ReqlQueryLogicError", "Cannot convert javascript `undefined` to ql::datum_t.", [0])
|
||||
|
||||
- cd: r.expr([1, 2, 3]).filter(r.js('(function(a) { return a >= 2; })'))
|
||||
ot: ([2, 3])
|
||||
|
||||
- cd: r.expr([1, 2, 3]).map(r.js('(function(a) { return a + 1; })'))
|
||||
ot: ([2, 3, 4])
|
||||
|
||||
- cd: r.expr([1, 2, 3]).map(r.js('1'))
|
||||
ot: err("ReqlQueryLogicError", "Expected type FUNCTION but found DATUM:", [0])
|
||||
|
||||
- cd: r.expr([1, 2, 3]).filter(r.js('(function(a) {})'))
|
||||
ot: err("ReqlQueryLogicError", "Cannot convert javascript `undefined` to ql::datum_t.", [0])
|
||||
|
||||
# What happens if we pass static values to things that expect functions
|
||||
- cd: r.expr([1, 2, 3]).map(1)
|
||||
ot: err("ReqlQueryLogicError", "Expected type FUNCTION but found DATUM:", [0])
|
||||
|
||||
- cd: r.expr([1, 2, 3]).filter('foo')
|
||||
ot: ([1, 2, 3])
|
||||
- cd: r.expr([1, 2, 4]).filter([])
|
||||
ot: ([1, 2, 4])
|
||||
- cd: r.expr([1, 2, 3]).filter(null)
|
||||
ot: ([])
|
||||
|
||||
- cd: r.expr([1, 2, 4]).filter(False)
|
||||
rb: r([1, 2, 4]).filter(false)
|
||||
ot: ([])
|
||||
|
||||
# forEach
|
||||
- cd: tbl.count()
|
||||
ot: 0
|
||||
|
||||
# Insert three elements
|
||||
- js: r([1, 2, 3]).forEach(function (row) { return tbl.insert({ id:row }) })
|
||||
py: r.expr([1, 2, 3]).for_each(lambda row:tbl.insert({ 'id':row }))
|
||||
rb: r([1, 2, 3]).for_each{ |row| tbl.insert({ :id => row }) }
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':3})
|
||||
|
||||
- cd: tbl.count()
|
||||
ot: 3
|
||||
|
||||
# Update each row to add additional attribute
|
||||
- js: r([1, 2, 3]).forEach(function (row) { return tbl.update({ foo:row }) })
|
||||
py: r.expr([1,2,3]).for_each(lambda row:tbl.update({'foo':row}))
|
||||
rb: r.expr([1,2,3]).for_each{ |row| tbl.update({ :foo => row }) }
|
||||
ot: ({'deleted':0.0,'replaced':9,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
# Insert three more elements (and error on three)
|
||||
- js: r([1, 2, 3]).forEach(function (row) { return [tbl.insert({ id:row }), tbl.insert({ id:row.mul(10) })] })
|
||||
py: r.expr([1,2,3]).for_each(lambda row:[tbl.insert({ 'id':row }), tbl.insert({ 'id':row*10 })])
|
||||
rb: r.expr([1,2,3]).for_each{ |row| [tbl.insert({ :id => row}), tbl.insert({ :id => row*10})] }
|
||||
ot: {'first_error':"Duplicate primary key `id`:\n{\n\t\"foo\":\t3,\n\t\"id\":\t1\n}\n{\n\t\"id\":\t1\n}",'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':3,'skipped':0.0,'inserted':3}
|
||||
|
||||
- cd: tbl.count()
|
||||
ot: 6
|
||||
|
||||
- cd: tableCount = tbl2.count()
|
||||
- cd: r.expr([1, 2, 3]).for_each( tbl2.insert({}) )
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'generated_keys':arrlen(3,uuid()),'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':3})
|
||||
# inserts only a single document per #3700
|
||||
- cd: tbl2.count()
|
||||
ot: tableCount + 1
|
||||
|
||||
# We have six elements, update them 6*2*3=36 times
|
||||
- js: r([1, 2, 3]).forEach(function (row) { return [tbl.update({ foo:row }), tbl.update({ bar:row })] })
|
||||
py: r.expr([1,2,3]).for_each(lambda row:[tbl.update({'foo':row}), tbl.update({'bar':row})])
|
||||
rb: r.expr([1,2,3]).for_each{ |row| [tbl.update({:foo => row}), tbl.update({:bar => row})]}
|
||||
ot: ({'deleted':0.0,'replaced':36,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
# forEach negative cases
|
||||
- cd: r.expr([1, 2, 3]).for_each( tbl2.insert({ 'id':r.row }) )
|
||||
rb: r([1, 2, 3]).for_each{ |row| tbl2.insert({ 'id':row }) }
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':3})
|
||||
|
||||
- cd: r.expr([1, 2, 3]).for_each(1)
|
||||
ot: err("ReqlQueryLogicError", "FOR_EACH expects one or more basic write queries. Expected type ARRAY but found NUMBER.", [0])
|
||||
|
||||
- py: r.expr([1, 2, 3]).for_each(lambda x:x)
|
||||
js: r([1, 2, 3]).forEach(function (x) { return x; })
|
||||
rb: r([1, 2, 3]).for_each{ |x| x }
|
||||
ot: err("ReqlQueryLogicError", "FOR_EACH expects one or more basic write queries. Expected type ARRAY but found NUMBER.", [1, 1])
|
||||
|
||||
- cd: r.expr([1, 2, 3]).for_each(r.row)
|
||||
rb: r([1, 2, 3]).for_each{ |row| row }
|
||||
ot: err("ReqlQueryLogicError", "FOR_EACH expects one or more basic write queries. Expected type ARRAY but found NUMBER.", [1, 1])
|
||||
|
||||
- js: r([1, 2, 3]).forEach(function (row) { return tbl; })
|
||||
py: r.expr([1, 2, 3]).for_each(lambda row:tbl)
|
||||
rb: r([1, 2, 3]).for_each{ |row| tbl }
|
||||
ot: err("ReqlQueryLogicError", "FOR_EACH expects one or more basic write queries.", [1, 1])
|
||||
|
||||
# This is only relevant in JS -- what happens when we return undefined
|
||||
- js: r([1, 2, 3]).forEach(function (row) {})
|
||||
ot: err("ReqlDriverCompileError", 'Anonymous function returned `undefined`. Did you forget a `return`?', [1])
|
||||
|
||||
# Make sure write queries can't be nested into stream ops
|
||||
- cd: r.expr(1).do(tbl.insert({'foo':r.row}))
|
||||
rb: r(1).do{ |row| tbl.insert({ :foo => row }) }
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'generated_keys':arrlen(1,uuid()),'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':1})
|
||||
|
||||
- py: r.expr([1, 2])[0].do(tbl.insert({'foo':r.row}))
|
||||
js: r.expr([1, 2]).nth(0).do(tbl.insert({'foo':r.row}))
|
||||
rb: r([1, 2])[0].do{ |row| tbl.insert({ :foo => row }) }
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'generated_keys':arrlen(1,uuid()),'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':1})
|
||||
|
||||
- cd: r.expr([1, 2]).map(tbl.insert({'foo':r.row}))
|
||||
rb: r([1, 2]).map{ |row| tbl.insert({ :foo => row }) }
|
||||
ot: err('ReqlCompileError', 'Cannot nest writes or meta ops in stream operations. Use FOR_EACH instead.', [0])
|
||||
|
||||
- cd: r.expr([1, 2]).map(r.db('test').table_create('table_create_failure'))
|
||||
ot: err('ReqlCompileError', 'Cannot nest writes or meta ops in stream operations. Use FOR_EACH instead.', [0])
|
||||
|
||||
- cd: r.expr([1, 2]).map(tbl.insert({'foo':r.row}).get_field('inserted'))
|
||||
rb: r.expr([1, 2]).map{|x| tbl.insert({'foo':x}).get_field('inserted')}
|
||||
ot: err('ReqlCompileError', 'Cannot nest writes or meta ops in stream operations. Use FOR_EACH instead.', [0])
|
||||
|
||||
- cd: r.expr([1, 2]).map(tbl.insert({'foo':r.row}).get_field('inserted').add(5))
|
||||
rb: r.expr([1, 2]).map{|x| tbl.insert({'foo':x}).get_field('inserted').add(5)}
|
||||
ot: err('ReqlCompileError', 'Cannot nest writes or meta ops in stream operations. Use FOR_EACH instead.', [0])
|
||||
|
||||
- cd: r.expr(1).do(r.db('test').table_create('table_create_success'))
|
||||
ot: partial({'tables_created':1})
|
@ -1,133 +0,0 @@
desc: Tests conversion to and from the RQL array type
|
||||
tests:
|
||||
- cd:
|
||||
- r.expr([])
|
||||
- r([])
|
||||
py: r.expr([])
|
||||
ot: []
|
||||
|
||||
- py: r.expr([1])
|
||||
js: r([1])
|
||||
rb: r([1])
|
||||
ot: [1]
|
||||
|
||||
- py: r.expr([1,2,3,4,5])
|
||||
js: r([1,2,3,4,5])
|
||||
rb: r.expr([1,2,3,4,5])
|
||||
ot: [1,2,3,4,5]
|
||||
|
||||
- cd: r.expr([]).type_of()
|
||||
ot: 'ARRAY'
|
||||
|
||||
# test coercions
|
||||
- cd:
|
||||
- r.expr([1, 2]).coerce_to('string')
|
||||
- r.expr([1, 2]).coerce_to('STRING')
|
||||
ot: '[1,2]'
|
||||
|
||||
- cd: r.expr([1, 2]).coerce_to('array')
|
||||
ot: [1, 2]
|
||||
|
||||
- cd: r.expr([1, 2]).coerce_to('number')
|
||||
ot: err('ReqlQueryLogicError', 'Cannot coerce ARRAY to NUMBER.', [0])
|
||||
|
||||
- cd: r.expr([['a', 1], ['b', 2]]).coerce_to('object')
|
||||
ot: {'a':1,'b':2}
|
||||
|
||||
- cd: r.expr([[]]).coerce_to('object')
|
||||
ot: err('ReqlQueryLogicError', 'Expected array of size 2, but got size 0.')
|
||||
|
||||
- cd: r.expr([['1',2,3]]).coerce_to('object')
|
||||
ot: err('ReqlQueryLogicError', 'Expected array of size 2, but got size 3.')
|
||||
|
||||
# Nested expression
|
||||
- cd: r.expr([r.expr(1)])
|
||||
ot: [1]
|
||||
|
||||
- cd: r.expr([1,3,4]).insert_at(1, 2)
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([2,3]).insert_at(0, 1)
|
||||
ot: [1,2,3]
|
||||
- cd: r.expr([1,2,3]).insert_at(-1, 4)
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([1,2,3]).insert_at(3, 4)
|
||||
ot: [1,2,3,4]
|
||||
- py: r.expr(3).do(lambda x: r.expr([1,2,3]).insert_at(x, 4))
|
||||
- js: r.expr(3).do(function (x) { return r.expr([1,2,3]).insert_at(x, 4); })
|
||||
- rb: r.expr(3).do{|x| r.expr([1,2,3]).insert_at(x, 4)}
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([1,2,3]).insert_at(4, 5)
|
||||
ot: err('ReqlNonExistenceError', 'Index `4` out of bounds for array of size: `3`.', [0])
|
||||
- cd: r.expr([1,2,3]).insert_at(-5, -1)
|
||||
ot: err('ReqlNonExistenceError', 'Index out of bounds: -5', [0])
|
||||
- cd: r.expr([1,2,3]).insert_at(1.5, 1)
|
||||
ot: err('ReqlQueryLogicError', 'Number not an integer: 1.5', [0])
|
||||
- cd: r.expr([1,2,3]).insert_at(null, 1)
|
||||
ot: err('ReqlNonExistenceError', 'Expected type NUMBER but found NULL.', [0])
|
||||
|
||||
- cd: r.expr([1,4]).splice_at(1, [2,3])
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([3,4]).splice_at(0, [1,2])
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([1,2]).splice_at(2, [3,4])
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([1,2]).splice_at(-1, [3,4])
|
||||
ot: [1,2,3,4]
|
||||
- py: r.expr(2).do(lambda x: r.expr([1,2]).splice_at(x, [3,4]))
|
||||
- js: r.expr(2).do(function (x) { return r.expr([1,2]).splice_at(x, [3,4]); })
|
||||
- rb: r.expr(2).do{|x| r.expr([1,2]).splice_at(x, [3,4])}
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([1,2]).splice_at(3, [3,4])
|
||||
ot: err('ReqlNonExistenceError', 'Index `3` out of bounds for array of size: `2`.', [0])
|
||||
- cd: r.expr([1,2]).splice_at(-4, [3,4])
|
||||
ot: err('ReqlNonExistenceError', 'Index out of bounds: -4', [0])
|
||||
- cd: r.expr([1,2,3]).splice_at(1.5, [1])
|
||||
ot: err('ReqlQueryLogicError', 'Number not an integer: 1.5', [0])
|
||||
- cd: r.expr([1,2,3]).splice_at(null, [1])
|
||||
ot: err('ReqlNonExistenceError', 'Expected type NUMBER but found NULL.', [0])
|
||||
- cd: r.expr([1,4]).splice_at(1, 2)
|
||||
ot: err('ReqlQueryLogicError', 'Expected type ARRAY but found NUMBER.', [0])
|
||||
|
||||
- cd: r.expr([1,2,3,4]).delete_at(0)
|
||||
ot: [2,3,4]
|
||||
- py: r.expr(0).do(lambda x: r.expr([1,2,3,4]).delete_at(x))
|
||||
- js: r.expr(0).do(function (x) { return r.expr([1,2,3,4]).delete_at(x); })
|
||||
- rb: r.expr(0).do{|x| r.expr([1,2,3,4]).delete_at(x)}
|
||||
ot: [2,3,4]
|
||||
- cd: r.expr([1,2,3,4]).delete_at(-1)
|
||||
ot: [1,2,3]
|
||||
- cd: r.expr([1,2,3,4]).delete_at(1,3)
|
||||
ot: [1,4]
|
||||
- cd: r.expr([1,2,3,4]).delete_at(4,4)
|
||||
ot: [1,2,3,4]
|
||||
- cd: r.expr([]).delete_at(0,0)
|
||||
ot: []
|
||||
- cd: r.expr([1,2,3,4]).delete_at(1,-1)
|
||||
ot: [1,4]
|
||||
- cd: r.expr([1,2,3,4]).delete_at(4)
|
||||
ot: err('ReqlNonExistenceError', 'Index `4` out of bounds for array of size: `4`.', [0])
|
||||
- cd: r.expr([1,2,3,4]).delete_at(-5)
|
||||
ot: err('ReqlNonExistenceError', 'Index out of bounds: -5', [0])
|
||||
- cd: r.expr([1,2,3]).delete_at(1.5)
|
||||
ot: err('ReqlQueryLogicError', 'Number not an integer: 1.5', [0])
|
||||
- cd: r.expr([1,2,3]).delete_at(null)
|
||||
ot: err('ReqlNonExistenceError', 'Expected type NUMBER but found NULL.', [0])
|
||||
|
||||
- cd: r.expr([0,2,3]).change_at(0, 1)
|
||||
ot: [1,2,3]
|
||||
- py: r.expr(1).do(lambda x: r.expr([0,2,3]).change_at(0,x))
|
||||
- js: r.expr(1).do(function (x) { return r.expr([0,2,3]).change_at(0,x); })
|
||||
- rb: r.expr(1).do{|x| r.expr([0,2,3]).change_at(0,x)}
|
||||
ot: [1,2,3]
|
||||
- cd: r.expr([1,0,3]).change_at(1, 2)
|
||||
ot: [1,2,3]
|
||||
- cd: r.expr([1,2,0]).change_at(2, 3)
|
||||
ot: [1,2,3]
|
||||
- cd: r.expr([1,2,3]).change_at(3, 4)
|
||||
ot: err('ReqlNonExistenceError', 'Index `3` out of bounds for array of size: `3`.', [0])
|
||||
- cd: r.expr([1,2,3,4]).change_at(-5, 1)
|
||||
ot: err('ReqlNonExistenceError', 'Index out of bounds: -5', [0])
|
||||
- cd: r.expr([1,2,3]).change_at(1.5, 1)
|
||||
ot: err('ReqlQueryLogicError', 'Number not an integer: 1.5', [0])
|
||||
- cd: r.expr([1,2,3]).change_at(null, 1)
|
||||
ot: err('ReqlNonExistenceError', 'Expected type NUMBER but found NULL.', [0])
|
@ -1,363 +0,0 @@
|
||||
desc: Tests of converstion to and from the RQL binary type
|
||||
tests:
|
||||
|
||||
# Short binary data from 0 to 12 characters
|
||||
# Not fully implemented for JS as comparing Buffer objects is non-trivial
|
||||
- def:
|
||||
rb: s = "".force_encoding('BINARY')
|
||||
py: s = b''
|
||||
js: s = Buffer("", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 0
|
||||
|
||||
- def:
|
||||
rb: s = "\x00".force_encoding('BINARY')
|
||||
py: s = b'\x00'
|
||||
js: s = Buffer("\x00", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 1
|
||||
|
||||
- def:
|
||||
rb: s = "\x00\x42".force_encoding('BINARY')
|
||||
py: s = b'\x00\x42'
|
||||
js: s = Buffer("\x00\x42", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 2
|
||||
|
||||
- def:
|
||||
rb: s = "\x00\xfe\x7a".force_encoding('BINARY')
|
||||
py: s = b'\x00\xfe\x7a'
|
||||
js: s = Buffer("\x00\xfe\x7a", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 3
|
||||
|
||||
- def:
|
||||
rb: s = "\xed\xfe\x00\xba".force_encoding('BINARY')
|
||||
py: s = b'\xed\xfe\x00\xba'
|
||||
js: s = Buffer("\xed\xfe\x00\xba", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 4
|
||||
|
||||
- def:
|
||||
rb: s = "\x50\xf9\x00\x77\xf9".force_encoding('BINARY')
|
||||
py: s = b'\x50\xf9\x00\x77\xf9'
|
||||
js: s = Buffer("\x50\xf9\x00\x77\xf9", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 5
|
||||
|
||||
- def:
|
||||
rb: s = "\x2f\xe3\xb5\x57\x00\x92".force_encoding('BINARY')
|
||||
py: s = b'\x2f\xe3\xb5\x57\x00\x92'
|
||||
js: s = Buffer("\x2f\xe3\xb5\x57\x00\x92", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 6
|
||||
|
||||
- def:
|
||||
rb: s = "\xa9\x43\x54\xe9\x00\xf8\xfb".force_encoding('BINARY')
|
||||
py: s = b'\xa9\x43\x54\xe9\x00\xf8\xfb'
|
||||
js: s = Buffer("\xa9\x43\x54\xe9\x00\xf8\xfb", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 7
|
||||
|
||||
- def:
|
||||
rb: s = "\x57\xbb\xe5\x82\x8b\xd3\x00\xf9".force_encoding('BINARY')
|
||||
py: s = b'\x57\xbb\xe5\x82\x8b\xd3\x00\xf9'
|
||||
js: s = Buffer("\x57\xbb\xe5\x82\x8b\xd3\x00\xf9", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 8
|
||||
|
||||
- def:
|
||||
rb: s = "\x44\x1b\x3e\x00\x13\x19\x29\x2a\xbf".force_encoding('BINARY')
|
||||
py: s = b'\x44\x1b\x3e\x00\x13\x19\x29\x2a\xbf'
|
||||
js: s = Buffer("\x44\x1b\x3e\x00\x13\x19\x29\x2a\xbf", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 9
|
||||
|
||||
- def:
|
||||
rb: s = "\x8a\x1d\x09\x00\x5d\x60\x6b\x2e\x70\xd9".force_encoding('BINARY')
|
||||
py: s = b'\x8a\x1d\x09\x00\x5d\x60\x6b\x2e\x70\xd9'
|
||||
js: s = Buffer("\x8a\x1d\x09\x00\x5d\x60\x6b\x2e\x70\xd9", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 10
|
||||
|
||||
- def:
|
||||
rb: s = "\x00\xaf\x47\x4b\x38\x99\x14\x8d\x8f\x10\x51".force_encoding('BINARY')
|
||||
py: s = b'\x00\xaf\x47\x4b\x38\x99\x14\x8d\x8f\x10\x51'
|
||||
js: s = Buffer("\x00\xaf\x47\x4b\x38\x99\x14\x8d\x8f\x10\x51", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 11
|
||||
|
||||
- def:
|
||||
cd: s = "\x45\x39\x00\xf7\xc2\x37\xfd\xe0\x38\x82\x40\xa9".force_encoding('BINARY')
|
||||
py: s = b'\x45\x39\x00\xf7\xc2\x37\xfd\xe0\x38\x82\x40\xa9'
|
||||
js: s = Buffer("\x45\x39\x00\xf7\xc2\x37\xfd\xe0\x38\x82\x40\xa9", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
- cd: r.binary(s).count()
|
||||
ot: 12
|
||||
|
||||
# Test comparisons
|
||||
# Binary objects to use, in order of increasing value
|
||||
- def:
|
||||
js: a = Buffer("\x00", 'binary')
|
||||
rb: a = "\x00".force_encoding('BINARY')
|
||||
py: a = b'\x00'
|
||||
- def:
|
||||
js: b = Buffer("\x00\x01", 'binary')
|
||||
rb: b = "\x00\x01".force_encoding('BINARY')
|
||||
py: b = b'\x00\x01'
|
||||
- def:
|
||||
js: c = Buffer("\x01", 'binary')
|
||||
rb: c = "\x01".force_encoding('BINARY')
|
||||
py: c = b'\x01'
|
||||
- def:
|
||||
js: d = Buffer("\x70\x22", 'binary')
|
||||
rb: d = "\x70\x22".force_encoding('BINARY')
|
||||
py: d = b'\x70\x22'
|
||||
- def:
|
||||
js: e = Buffer("\x80", 'binary')
|
||||
rb: e = "\x80".force_encoding('BINARY')
|
||||
py: e = b'\x80'
|
||||
- def:
|
||||
js: f = Buffer("\xFE", 'binary')
|
||||
rb: f = "\xFE".force_encoding('BINARY')
|
||||
py: f = b'\xFE'
|
||||
|
||||
# a -> a
|
||||
- cd: r.binary(a).eq(r.binary(a))
|
||||
ot: true
|
||||
- cd: r.binary(a).le(r.binary(a))
|
||||
ot: true
|
||||
- cd: r.binary(a).ge(r.binary(a))
|
||||
ot: true
|
||||
- cd: r.binary(a).ne(r.binary(a))
|
||||
ot: false
|
||||
- cd: r.binary(a).lt(r.binary(a))
|
||||
ot: false
|
||||
- cd: r.binary(a).gt(r.binary(a))
|
||||
ot: false
|
||||
|
||||
# a -> b
|
||||
- cd: r.binary(a).ne(r.binary(b))
|
||||
ot: true
|
||||
- cd: r.binary(a).lt(r.binary(b))
|
||||
ot: true
|
||||
- cd: r.binary(a).le(r.binary(b))
|
||||
ot: true
|
||||
- cd: r.binary(a).ge(r.binary(b))
|
||||
ot: false
|
||||
- cd: r.binary(a).gt(r.binary(b))
|
||||
ot: false
|
||||
- cd: r.binary(a).eq(r.binary(b))
|
||||
ot: false
|
||||
|
||||
# b -> c
|
||||
- cd: r.binary(b).ne(r.binary(c))
|
||||
ot: true
|
||||
- cd: r.binary(b).lt(r.binary(c))
|
||||
ot: true
|
||||
- cd: r.binary(b).le(r.binary(c))
|
||||
ot: true
|
||||
- cd: r.binary(b).ge(r.binary(c))
|
||||
ot: false
|
||||
- cd: r.binary(b).gt(r.binary(c))
|
||||
ot: false
|
||||
- cd: r.binary(b).eq(r.binary(c))
|
||||
ot: false
|
||||
|
||||
# c -> d
|
||||
- cd: r.binary(c).ne(r.binary(d))
|
||||
ot: true
|
||||
- cd: r.binary(c).lt(r.binary(d))
|
||||
ot: true
|
||||
- cd: r.binary(c).le(r.binary(d))
|
||||
ot: true
|
||||
- cd: r.binary(c).ge(r.binary(d))
|
||||
ot: false
|
||||
- cd: r.binary(c).gt(r.binary(d))
|
||||
ot: false
|
||||
- cd: r.binary(c).eq(r.binary(d))
|
||||
ot: false
|
||||
|
||||
# d -> e
|
||||
- cd: r.binary(d).ne(r.binary(e))
|
||||
ot: true
|
||||
- cd: r.binary(d).lt(r.binary(e))
|
||||
ot: true
|
||||
- cd: r.binary(d).le(r.binary(e))
|
||||
ot: true
|
||||
- cd: r.binary(d).ge(r.binary(e))
|
||||
ot: false
|
||||
- cd: r.binary(d).gt(r.binary(e))
|
||||
ot: false
|
||||
- cd: r.binary(d).eq(r.binary(e))
|
||||
ot: false
|
||||
|
||||
# e -> f
|
||||
- cd: r.binary(e).ne(r.binary(f))
|
||||
ot: true
|
||||
- cd: r.binary(e).lt(r.binary(f))
|
||||
ot: true
|
||||
- cd: r.binary(e).le(r.binary(f))
|
||||
ot: true
|
||||
- cd: r.binary(e).ge(r.binary(f))
|
||||
ot: false
|
||||
- cd: r.binary(e).gt(r.binary(f))
|
||||
ot: false
|
||||
- cd: r.binary(e).eq(r.binary(f))
|
||||
ot: false
|
||||
|
||||
# f -> f
|
||||
- cd: r.binary(f).eq(r.binary(f))
|
||||
ot: true
|
||||
- cd: r.binary(f).le(r.binary(f))
|
||||
ot: true
|
||||
- cd: r.binary(f).ge(r.binary(f))
|
||||
ot: true
|
||||
- cd: r.binary(f).ne(r.binary(f))
|
||||
ot: false
|
||||
- cd: r.binary(f).lt(r.binary(f))
|
||||
ot: false
|
||||
- cd: r.binary(f).gt(r.binary(f))
|
||||
ot: false
|
||||
|
||||
# Test encodings
|
||||
- py:
|
||||
cd: r.binary(u'イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム'.encode('utf-8'))
|
||||
ot: u'イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム'.encode('utf-8')
|
||||
py3:
|
||||
cd: r.binary(str('イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム').encode('utf-8'))
|
||||
ot: str('イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム').encode('utf-8')
|
||||
- py:
|
||||
cd: r.binary(u'ƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏ'.encode('utf-16'))
|
||||
ot: u'ƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏ'.encode('utf-16')
|
||||
py3:
|
||||
cd: r.binary(str('ƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏ').encode('utf-16'))
|
||||
ot: str('ƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏ').encode('utf-16')
|
||||
- py:
|
||||
cd: r.binary(u'lorem ipsum'.encode('ascii'))
|
||||
ot: u'lorem ipsum'.encode('ascii')
|
||||
py3:
|
||||
cd: r.binary(str('lorem ipsum').encode('ascii'))
|
||||
ot: str('lorem ipsum').encode('ascii')
|
||||
|
||||
# Test coercions
|
||||
- py: r.binary(b'foo').coerce_to('string')
|
||||
ot: 'foo'
|
||||
- py:
|
||||
cd: r.binary(u'イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム'.encode('utf-8')).coerce_to('string')
|
||||
ot: u'イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム'
|
||||
py3:
|
||||
cd: r.binary(str('イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム').encode('utf-8')).coerce_to('string')
|
||||
ot: str('イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム')
|
||||
- py:
|
||||
cd: r.binary(u'lorem ipsum'.encode('ascii')).coerce_to('string')
|
||||
ot: u'lorem ipsum'
|
||||
py3:
|
||||
cd: r.binary(str('lorem ipsum').encode('ascii')).coerce_to('string')
|
||||
ot: str('lorem ipsum')
|
||||
|
||||
- py: r.expr('foo').coerce_to('binary')
|
||||
ot: b'foo'
|
||||
|
||||
- cd: r.binary(a).coerce_to('bool')
|
||||
ot: True
|
||||
|
||||
- py: r.binary(b'foo').coerce_to('binary')
|
||||
ot: b'foo'
|
||||
|
||||
# Test slice
|
||||
- py: r.binary(b'abcdefg').slice(-3,-1)
|
||||
ot: b'ef'
|
||||
- py: r.binary(b'abcdefg').slice(0, 2)
|
||||
ot: b'ab'
|
||||
- py: r.binary(b'abcdefg').slice(3, -1)
|
||||
ot: b'def'
|
||||
- py: r.binary(b'abcdefg').slice(-5, 5)
|
||||
ot: b'cde'
|
||||
- py: r.binary(b'abcdefg').slice(-8, 2)
|
||||
ot: b'ab'
|
||||
- py: r.binary(b'abcdefg').slice(5, 7)
|
||||
ot: b'fg'
|
||||
|
||||
# Left side out-of-bound should clamp to index 0
|
||||
- py: r.binary(b'abcdefg').slice(-9, 2)
|
||||
ot: b'ab'
|
||||
|
||||
# Right side out-of-bound should return the valid subset of the range
|
||||
- py: r.binary(b'abcdefg').slice(5, 9)
|
||||
ot: b'fg'
|
||||
|
||||
# Test binary_format optarg
|
||||
- cd: r.binary(b)
|
||||
runopts:
|
||||
binary_format: "native"
|
||||
ot: b
|
||||
- cd: r.binary(b)
|
||||
runopts:
|
||||
binary_format: "raw"
|
||||
ot: {'$reql_type$':'BINARY','data':'AAE='}
|
||||
|
||||
# Test r.binary of nested terms
|
||||
- cd: r.binary(r.expr("data"))
|
||||
ot:
|
||||
js: Buffer("data", "binary")
|
||||
rb: "data"
|
||||
py: b"data"
|
||||
|
||||
- cd: r.binary(r.expr({}))
|
||||
ot: err('ReqlQueryLogicError', 'Expected type STRING but found OBJECT.', [])
|
||||
|
||||
- cd: r.binary(r.expr([]))
|
||||
ot: err('ReqlQueryLogicError', 'Expected type STRING but found ARRAY.', [])
|
||||
|
||||
# Test errors
|
||||
|
||||
# Missing 'data' field
|
||||
- py: r.expr({'$reql_type$':'BINARY'})
|
||||
rb: r.expr({'$reql_type$':'BINARY'})
|
||||
ot: err('ReqlQueryLogicError','Invalid binary pseudotype:'+' lacking `data` key.',[])
|
||||
|
||||
# Invalid base64 format
|
||||
- py: r.expr({'$reql_type$':'BINARY','data':'ABCDEFGH==AA'})
|
||||
ot: err('ReqlQueryLogicError','Invalid base64 format, data found after padding character \'=\'.',[])
|
||||
- py: r.expr({'$reql_type$':'BINARY','data':'ABCDEF==$'})
|
||||
ot: err('ReqlQueryLogicError','Invalid base64 format, data found after padding character \'=\'.',[])
|
||||
- py: r.expr({'$reql_type$':'BINARY','data':'A^CDEFGH'})
|
||||
ot: err('ReqlQueryLogicError','Invalid base64 character found:'+' \'^\'.',[])
|
||||
- py: r.expr({'$reql_type$':'BINARY','data':'ABCDE'})
|
||||
ot: err('ReqlQueryLogicError','Invalid base64 length:'+' 1 character remaining, cannot decode a full byte.',[])
|
||||
|
||||
# Invalid coercions
|
||||
- cd: r.binary(a).coerce_to('array')
|
||||
ot: err('ReqlQueryLogicError','Cannot coerce BINARY to ARRAY.',[])
|
||||
- cd: r.binary(a).coerce_to('object')
|
||||
ot: err('ReqlQueryLogicError','Cannot coerce BINARY to OBJECT.',[])
|
||||
- cd: r.binary(a).coerce_to('number')
|
||||
ot: err('ReqlQueryLogicError','Cannot coerce BINARY to NUMBER.',[])
|
||||
- cd: r.binary(a).coerce_to('nu'+'ll')
|
||||
ot: err('ReqlQueryLogicError','Cannot coerce BINARY to NULL.',[])
|
@ -1,47 +0,0 @@
|
||||
desc: Tests of conversion to and from the RQL bool type
|
||||
tests:
|
||||
- py: r.expr(True)
|
||||
js:
|
||||
- r.expr(true)
|
||||
- r(true)
|
||||
rb: r true
|
||||
ot: true
|
||||
|
||||
- py: r.expr(False)
|
||||
js:
|
||||
- r.expr(false)
|
||||
- r(false)
|
||||
rb: r false
|
||||
ot: false
|
||||
|
||||
- cd: r.expr(False).type_of()
|
||||
ot: 'BOOL'
|
||||
|
||||
# test coercions
|
||||
- cd: r.expr(True).coerce_to('string')
|
||||
ot: 'true'
|
||||
|
||||
- cd: r.expr(True).coerce_to('bool')
|
||||
ot: True
|
||||
|
||||
- cd: r.expr(False).coerce_to('bool')
|
||||
ot: False
|
||||
|
||||
- cd: r.expr(null).coerce_to('bool')
|
||||
ot: False
|
||||
|
||||
- cd: r.expr(0).coerce_to('bool')
|
||||
ot: True
|
||||
|
||||
- cd: r.expr('false').coerce_to('bool')
|
||||
ot: True
|
||||
|
||||
- cd: r.expr('foo').coerce_to('bool')
|
||||
ot: True
|
||||
|
||||
- cd: r.expr([]).coerce_to('bool')
|
||||
ot: True
|
||||
|
||||
- cd: r.expr({}).coerce_to('bool')
|
||||
ot: True
|
||||
|
@ -1,18 +0,0 @@
|
||||
desc: Tests of conversion to and from the RQL null type
|
||||
tests:
|
||||
- cd:
|
||||
- r(null)
|
||||
- r.expr(null)
|
||||
py: r.expr(null)
|
||||
ot: (null)
|
||||
|
||||
- cd: r.expr(null).type_of()
|
||||
rb: r(null).type_of()
|
||||
ot: 'NULL'
|
||||
|
||||
# test coercions
|
||||
- cd: r.expr(null).coerce_to('string')
|
||||
ot: 'null'
|
||||
|
||||
- cd: r.expr(null).coerce_to('null')
|
||||
ot: null
|
@ -1,125 +0,0 @@
|
||||
# desc will be included in a comment to help identify test groups
|
||||
desc: Tests of conversion to and from the RQL number type
|
||||
tests:
|
||||
|
||||
# Simple integers
|
||||
- cd: r.expr(1)
|
||||
js:
|
||||
- r(1)
|
||||
- r.expr(1)
|
||||
rb:
|
||||
- r 1
|
||||
- r(1)
|
||||
- r.expr(1)
|
||||
ot: 1
|
||||
- cd: r.expr(-1)
|
||||
js:
|
||||
- r(-1)
|
||||
- r.expr(-1)
|
||||
rb:
|
||||
- r -1
|
||||
- r(-1)
|
||||
- r.expr(-1)
|
||||
ot: -1
|
||||
- cd: r.expr(0)
|
||||
js:
|
||||
- r(0)
|
||||
- r.expr(0)
|
||||
rb:
|
||||
- r 0
|
||||
- r(0)
|
||||
- r.expr(0)
|
||||
ot: 0
|
||||
|
||||
# Floats
|
||||
- cd: r.expr(1.0)
|
||||
js:
|
||||
- r(1.0)
|
||||
- r.expr(1.0)
|
||||
rb:
|
||||
- r 1.0
|
||||
- r(1.0)
|
||||
- r.expr(1.0)
|
||||
ot: 1.0
|
||||
- cd: r.expr(1.5)
|
||||
js:
|
||||
- r(1.5)
|
||||
- r.expr(1.5)
|
||||
rb:
|
||||
- r 1.5
|
||||
- r(1.5)
|
||||
- r.expr(1.5)
|
||||
ot: 1.5
|
||||
- cd: r.expr(-0.5)
|
||||
js:
|
||||
- r(-0.5)
|
||||
- r.expr(-0.5)
|
||||
rb:
|
||||
- r -0.5
|
||||
- r(-0.5)
|
||||
- r.expr(-0.5)
|
||||
ot: -0.5
|
||||
- cd: r.expr(67498.89278)
|
||||
js:
|
||||
- r(67498.89278)
|
||||
- r.expr(67498.89278)
|
||||
rb:
|
||||
- r 67498.89278
|
||||
- r(67498.89278)
|
||||
- r.expr(67498.89278)
|
||||
ot: 67498.89278
|
||||
|
||||
# Big numbers
|
||||
- cd: r.expr(1234567890)
|
||||
js:
|
||||
- r(1234567890)
|
||||
- r.expr(1234567890)
|
||||
rb:
|
||||
- r 1234567890
|
||||
- r(1234567890)
|
||||
- r.expr(1234567890)
|
||||
ot: 1234567890
|
||||
|
||||
- cd: r.expr(-73850380122423)
|
||||
js:
|
||||
- r.expr(-73850380122423)
|
||||
- r(-73850380122423)
|
||||
rb:
|
||||
- r -73850380122423
|
||||
- r.expr(-73850380122423)
|
||||
- r(-73850380122423)
|
||||
ot: -73850380122423
|
||||
|
||||
# Test that numbers round-trip correctly
|
||||
- py:
|
||||
cd: r.expr(1234567890123456789012345678901234567890)
|
||||
ot: float(1234567890123456789012345678901234567890)
|
||||
js:
|
||||
cd: r.expr(1234567890123456789012345678901234567890)
|
||||
ot: 1234567890123456789012345678901234567890
|
||||
- cd: r.expr(123.4567890123456789012345678901234567890)
|
||||
ot: 123.4567890123456789012345678901234567890
|
||||
|
||||
- cd: r.expr(1).type_of()
|
||||
ot: 'NUMBER'
|
||||
|
||||
# test coercions
|
||||
- cd: r.expr(1).coerce_to('string')
|
||||
ot: '1'
|
||||
|
||||
- cd: r.expr(1).coerce_to('number')
|
||||
ot: 1
|
||||
|
||||
# The drivers now convert to an int (where relevant) if we think the result
|
||||
# looks like an int (result % 1.0 == 0.0)
|
||||
- py: r.expr(1.0)
|
||||
rb: r 1.0
|
||||
ot: int_cmp(1)
|
||||
|
||||
- py: r.expr(45)
|
||||
rb: r 45
|
||||
ot: int_cmp(45)
|
||||
|
||||
- py: r.expr(1.2)
|
||||
rb: r 1.2
|
||||
ot: float_cmp(1.2)
|
@ -1,85 +0,0 @@
|
||||
desc: Tests conversion to and from the RQL object type
|
||||
tests:
|
||||
- cd:
|
||||
- r({})
|
||||
- r.expr({})
|
||||
py: r.expr({})
|
||||
ot: {}
|
||||
- cd:
|
||||
- r({a:1})
|
||||
- r.expr({'a':1})
|
||||
py: r.expr({'a':1})
|
||||
ot: {'a':1}
|
||||
- cd:
|
||||
- r({a:1, b:'two', c:True})
|
||||
- r.expr({'a':1, 'b':'two', 'c':True})
|
||||
py: r.expr({'a':1, 'b':'two', 'c':True})
|
||||
ot: {'a':1, 'b':'two', 'c':True}
|
||||
|
||||
# Nested expressions
|
||||
- cd: r.expr({'a':r.expr(1)})
|
||||
ot: {'a':1}
|
||||
|
||||
- cd: r.expr({'a':{'b':[{'c':2}, 'a', 4]}})
|
||||
ot: {'a':{'b':[{'c':2}, 'a', 4]}}
|
||||
|
||||
- cd: r.expr({'a':1}).type_of()
|
||||
ot: 'OBJECT'
|
||||
|
||||
# test coercions
|
||||
- cd: r.expr({'a':1}).coerce_to('string')
|
||||
ot:
|
||||
cd: '{"a":1}'
|
||||
|
||||
- cd: r.expr({'a':1}).coerce_to('object')
|
||||
ot: {'a':1}
|
||||
|
||||
- cd: r.expr({'a':1}).coerce_to('array')
|
||||
ot: [['a',1]]
|
||||
|
||||
# Error cases
|
||||
- cd: r.expr({12:'a'})
|
||||
# JavaScript auto-converts keys for us
|
||||
js:
|
||||
ot: err_regex("ReqlCompileError", "Object keys must be strings.*")
|
||||
|
||||
- cd: r.expr({'a':{12:'b'}})
|
||||
# JavaScript auto-converts keys for us
|
||||
js:
|
||||
ot: err_regex("ReqlCompileError", "Object keys must be strings.*")
|
||||
|
||||
- js: r({'a':undefined})
|
||||
ot: err("ReqlCompileError", "Object field 'a' may not be undefined")
|
||||
|
||||
- js: r({'a':{'b':undefined}})
|
||||
ot: err("ReqlCompileError", "Object field 'b' may not be undefined")
|
||||
|
||||
- cd: r.expr({}, "foo")
|
||||
ot:
|
||||
cd: err("ReqlCompileError", "Second argument to `r.expr` must be a number.")
|
||||
js: err("ReqlCompileError", "Second argument to `r.expr` must be a number or undefined.")
|
||||
|
||||
- js: r.expr({}, NaN)
|
||||
ot: err("ReqlCompileError", "Second argument to `r.expr` must be a number or undefined.")
|
||||
|
||||
# r.object
|
||||
- cd: r.object()
|
||||
ot: {}
|
||||
|
||||
- cd: r.object('a', 1, 'b', 2)
|
||||
ot: {'a':1,'b':2}
|
||||
|
||||
- cd: r.object('c'+'d', 3)
|
||||
ot: {'cd':3}
|
||||
|
||||
- cd: r.object('o','d','d')
|
||||
ot: err("ReqlQueryLogicError", "OBJECT expects an even number of arguments (but found 3).", [])
|
||||
|
||||
- cd: r.object(1, 1)
|
||||
ot: err("ReqlQueryLogicError","Expected type STRING but found NUMBER.",[])
|
||||
|
||||
- cd: r.object('e', 4, 'e', 5)
|
||||
ot: err("ReqlQueryLogicError","Duplicate key \"e\" in object. (got 4 and 5 as values)",[])
|
||||
|
||||
- cd: r.object('g', r.db('test'))
|
||||
ot: err("ReqlQueryLogicError","Expected type DATUM but found DATABASE:",[])
|
@ -1,329 +0,0 @@
|
||||
desc: Tests of converstion to and from the RQL string type
|
||||
tests:
|
||||
|
||||
- def:
|
||||
cd: japanese_hello = 'こんにちは'
|
||||
# Python supports unicode strings with the u'' pattern, except 3.0-3.2
|
||||
py: japanese_hello = u'こんにちは'
|
||||
py3.0: japanese_hello = 'こんにちは'
|
||||
py3.1: japanese_hello = 'こんにちは'
|
||||
py3.2: japanese_hello = 'こんにちは'
|
||||
|
||||
# Simple strings
|
||||
- cd:
|
||||
- r('str')
|
||||
- r.expr('str')
|
||||
py: r.expr('str')
|
||||
ot: "str"
|
||||
- cd:
|
||||
- r("str")
|
||||
- r.expr("str")
|
||||
py: r.expr("str")
|
||||
ot: "str"
|
||||
|
||||
# Unicode
|
||||
|
||||
- cd:
|
||||
py:
|
||||
cd: r.expr(u'str')
|
||||
ot: u'str'
|
||||
py3.0: r.expr('str')
|
||||
py3.1: r.expr('str')
|
||||
py3.2: r.expr('str')
|
||||
ot: 'str'
|
||||
|
||||
- cd: r.expr(japanese_hello)
|
||||
ot:
|
||||
cd: 'こんにちは'
|
||||
py: u'こんにちは'
|
||||
py3.0: 'こんにちは'
|
||||
py3.1: 'こんにちは'
|
||||
py3.2: 'こんにちは'
|
||||
|
||||
- cd: r.expr('foo').type_of()
|
||||
ot: 'STRING'
|
||||
|
||||
# test coercions
|
||||
- cd: r.expr('foo').coerce_to('string')
|
||||
ot: 'foo'
|
||||
- cd: r.expr('-1.2').coerce_to('NUMBER')
|
||||
ot: -1.2
|
||||
- cd: r.expr('--1.2').coerce_to('NUMBER')
|
||||
ot: err("ReqlQueryLogicError", "Could not coerce `--1.2` to NUMBER.", [])
|
||||
- cd: r.expr('-1.2-').coerce_to('NUMBER')
|
||||
ot: err("ReqlQueryLogicError", "Could not coerce `-1.2-` to NUMBER.", [])
|
||||
- cd: r.expr('0xa').coerce_to('NUMBER')
|
||||
ot: 10
|
||||
- cd: r.expr('inf').coerce_to('NUMBER')
|
||||
ot: err("ReqlQueryLogicError", "Non-finite number: inf", [])
|
||||
|
||||
# count is defined as the number of unicode codepoints
|
||||
- cd: r.expr('hello, world!').count()
|
||||
ot: 13
|
||||
- cd: r.expr(japanese_hello).count()
|
||||
ot: 5
|
||||
|
||||
# slice is defined on unicode codepoints
|
||||
- cd: r.expr('hello').slice(1)
|
||||
ot: 'ello'
|
||||
- cd: r.expr('hello').slice(-1)
|
||||
ot: 'o'
|
||||
- cd: r.expr('hello').slice(-4,3)
|
||||
ot: 'el'
|
||||
- cd: r.expr('hello').slice(-99)
|
||||
ot: 'hello'
|
||||
- cd: r.expr('hello').slice(0)
|
||||
ot: 'hello'
|
||||
- cd: r.expr(japanese_hello).slice(1)
|
||||
ot:
|
||||
cd: 'んにちは'
|
||||
py: u'んにちは'
|
||||
py3.0: 'んにちは'
|
||||
py3.1: 'んにちは'
|
||||
py3.2: 'んにちは'
|
||||
- cd: r.expr(japanese_hello).slice(1,2)
|
||||
ot:
|
||||
cd: 'ん'
|
||||
py: u'ん'
|
||||
py3.0: 'ん'
|
||||
py3.1: 'ん'
|
||||
py3.2: 'ん'
|
||||
- cd: r.expr(japanese_hello).slice(-3)
|
||||
ot:
|
||||
cd: 'にちは'
|
||||
py: u'にちは'
|
||||
py3.0: 'にちは'
|
||||
py3.1: 'にちは'
|
||||
py3.2: 'にちは'
|
||||
|
||||
# This is how these edge cases are handled in Python.
|
||||
- cd: r.expr('').split()
|
||||
ot: []
|
||||
- cd: r.expr('').split(null)
|
||||
ot: []
|
||||
- cd: r.expr('').split(' ')
|
||||
ot: ['']
|
||||
- cd: r.expr('').split('')
|
||||
ot: []
|
||||
- cd: r.expr('').split(null, 5)
|
||||
ot: []
|
||||
- cd: r.expr('').split(' ', 5)
|
||||
ot: ['']
|
||||
- cd: r.expr('').split('', 5)
|
||||
ot: []
|
||||
|
||||
- cd: r.expr('aaaa bbbb cccc ').split()
|
||||
ot: ['aaaa', 'bbbb', 'cccc']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(null)
|
||||
ot: ['aaaa', 'bbbb', 'cccc']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' ')
|
||||
ot: ['aaaa', 'bbbb', '', 'cccc', '']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('')
|
||||
ot: ['a', 'a', 'a', 'a', ' ', 'b', 'b', 'b', 'b', ' ', ' ', 'c', 'c', 'c', 'c', ' ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('b')
|
||||
ot: ['aaaa ', '', '', '', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('bb')
|
||||
ot: ['aaaa ', '', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' bbbb ')
|
||||
ot: ['aaaa', 'cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split('bb')
|
||||
ot: ['aaaa ', '', ' cccc b d ', ' e ', '', ' f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ')
|
||||
ot: ['aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ')
|
||||
ot: ['aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(null, 3)
|
||||
ot: ['aaaa', 'bbbb', 'cccc']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' ', 5)
|
||||
ot: ['aaaa', 'bbbb', '', 'cccc', '']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('', 5)
|
||||
ot: ['a', 'a', 'a', 'a', ' ', 'bbbb cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('b', 5)
|
||||
ot: ['aaaa ', '', '', '', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('bb', 3)
|
||||
ot: ['aaaa ', '', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' bbbb ', 2)
|
||||
ot: ['aaaa', 'cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split('bb', 6)
|
||||
ot: ['aaaa ', '', ' cccc b d ', ' e ', '', ' f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: ['aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 3)
|
||||
ot: ['aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(null, 2)
|
||||
ot: ['aaaa', 'bbbb', 'cccc ']
|
||||
- cd: r.expr("a b ").split(null, 2)
|
||||
ot: ["a", "b"]
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' ', 4)
|
||||
ot: ['aaaa', 'bbbb', '', 'cccc', '']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('', 4)
|
||||
ot: ['a', 'a', 'a', 'a', ' bbbb cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('b', 4)
|
||||
ot: ['aaaa ', '', '', '', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('bb', 2)
|
||||
ot: ['aaaa ', '', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' bbbb ', 1)
|
||||
ot: ['aaaa', 'cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split('bb', 5)
|
||||
ot: ['aaaa ', '', ' cccc b d ', ' e ', '', ' f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 1)
|
||||
ot: ['aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: ['aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(null, 1)
|
||||
ot: ['aaaa', 'bbbb cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' ', 2)
|
||||
ot: ['aaaa', 'bbbb', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('', 2)
|
||||
ot: ['a', 'a', 'aa bbbb cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('b', 2)
|
||||
ot: ['aaaa ', '', 'bb cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split('bb', 2)
|
||||
ot: ['aaaa ', '', ' cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc ').split(' bbbb ', 2)
|
||||
ot: ['aaaa', 'cccc ']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split('bb', 2)
|
||||
ot: ['aaaa ', '', ' cccc b d bb e bbbb f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: ['aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr('aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: ['aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr(' ').split()
|
||||
ot: []
|
||||
- cd: r.expr(' ').split(null)
|
||||
ot: []
|
||||
- cd: r.expr(' ').split(' ')
|
||||
ot: ['', '', '']
|
||||
- cd: r.expr(' ').split(null, 5)
|
||||
ot: []
|
||||
- cd: r.expr(' ').split(' ', 5)
|
||||
ot: ['', '', '']
|
||||
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split()
|
||||
ot: ['aaaa', 'bbbb', 'cccc']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(null)
|
||||
ot: ['aaaa', 'bbbb', 'cccc']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' ')
|
||||
ot: ['', '', 'aaaa', 'bbbb', '', 'cccc', '']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('b')
|
||||
ot: [' aaaa ', '', '', '', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('bb')
|
||||
ot: [' aaaa ', '', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' bbbb ')
|
||||
ot: [' aaaa', 'cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split('bb')
|
||||
ot: [' aaaa ', '', ' cccc b d ', ' e ', '', ' f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ')
|
||||
ot: [' aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ')
|
||||
ot: [' aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(null, 3)
|
||||
ot: ['aaaa', 'bbbb', 'cccc']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' ', 5)
|
||||
ot: ['', '', 'aaaa', 'bbbb', '', 'cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('b', 5)
|
||||
ot: [' aaaa ', '', '', '', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('bb', 3)
|
||||
ot: [' aaaa ', '', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' bbbb ', 2)
|
||||
ot: [' aaaa', 'cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split('bb', 6)
|
||||
ot: [' aaaa ', '', ' cccc b d ', ' e ', '', ' f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: [' aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 3)
|
||||
ot: [' aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(null, 2)
|
||||
ot: ['aaaa', 'bbbb', 'cccc ']
|
||||
- cd: r.expr("a b ").split(null, 2)
|
||||
ot: ["a", "b"]
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' ', 4)
|
||||
ot: ['', '', 'aaaa', 'bbbb', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('b', 4)
|
||||
ot: [' aaaa ', '', '', '', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('bb', 2)
|
||||
ot: [' aaaa ', '', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' bbbb ', 1)
|
||||
ot: [' aaaa', 'cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split('bb', 5)
|
||||
ot: [' aaaa ', '', ' cccc b d ', ' e ', '', ' f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 1)
|
||||
ot: [' aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: [' aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(null, 1)
|
||||
ot: ['aaaa', 'bbbb cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' ', 2)
|
||||
ot: ['', '', 'aaaa bbbb cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('b', 2)
|
||||
ot: [' aaaa ', '', 'bb cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split('bb', 2)
|
||||
ot: [' aaaa ', '', ' cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc ').split(' bbbb ', 2)
|
||||
ot: [' aaaa', 'cccc ']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split('bb', 2)
|
||||
ot: [' aaaa ', '', ' cccc b d bb e bbbb f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: [' aaaa', 'cccc b d bb e bbbb f']
|
||||
- cd: r.expr(' aaaa bbbb cccc b d bb e bbbb f').split(' bbbb ', 2)
|
||||
ot: [' aaaa', 'cccc b d bb e', 'f']
|
||||
|
||||
- cd: r.expr("abc-dEf-GHJ").upcase()
|
||||
ot: "ABC-DEF-GHJ"
|
||||
- cd: r.expr("abc-dEf-GHJ").downcase()
|
||||
ot: "abc-def-ghj"
|
||||
|
||||
# Same 3.0-3.2 caveats
|
||||
- py:
|
||||
cd: r.expr(u"f\u00e9oo").split("")
|
||||
ot: [u"f", u"\u00e9", u"o", u"o"]
|
||||
py3.0: r.expr("f\u00e9oo").split("")
|
||||
py3.1: r.expr("f\u00e9oo").split("")
|
||||
py3.2: r.expr("f\u00e9oo").split("")
|
||||
cd: r.expr("f\u00e9oo").split("")
|
||||
ot: ["f", "\u00e9", "o", "o"]
|
||||
|
||||
- py:
|
||||
cd: r.expr(u"fe\u0301oo").split("")
|
||||
ot: [u"f", u"e\u0301", u"o", u"o"]
|
||||
py3.0: r.expr("fe\u0301oo").split("")
|
||||
py3.1: r.expr("fe\u0301oo").split("")
|
||||
py3.2: r.expr("fe\u0301oo").split("")
|
||||
cd: r.expr("fe\u0301oo").split("")
|
||||
ot: ["f", "e\u0301", "o", "o"]
|
||||
|
||||
## Unicode spacing characters.
|
||||
|
||||
## original set from previous work:
|
||||
- cd: r.expr("foo bar\tbaz\nquux\rfred\u000bbarney\u000cwilma").split()
|
||||
py:
|
||||
cd: r.expr(u"foo bar\tbaz\nquux\rfred\u000bbarney\u000cwilma").split()
|
||||
ot: ["foo", "bar", "baz", "quux", "fred", "barney", "wilma"]
|
||||
py3.0: r.expr("foo bar\tbaz\nquux\rfred\u000bbarney\u000cwilma").split()
|
||||
py3.1: r.expr("foo bar\tbaz\nquux\rfred\u000bbarney\u000cwilma").split()
|
||||
py3.2: r.expr("foo bar\tbaz\nquux\rfred\u000bbarney\u000cwilma").split()
|
||||
ot: ["foo", "bar", "baz", "quux", "fred", "barney", "wilma"]
|
||||
|
||||
## some specialized Unicode horrors:
|
||||
## - U+00A0 is nonbreaking space and is in the Zs category
|
||||
## - U+0085 is the next line character and is not in the Zs category but is considered whitespace
|
||||
## - U+2001 is em quad space and is in the Zs category
|
||||
## - U+200B is a zero width space and is not in the Zs category and is not considered whitespace
|
||||
## - U+2060 is a word joining zero width nonbreaking space and is NOT in any of the Z categories
|
||||
## - U+2028 is a line separator and is in the Zl category
|
||||
## - U+2029 is a paragraph separator and is in the Zp category
|
||||
- py:
|
||||
cd: r.expr(u"foo\u00a0bar\u2001baz\u2060quux\u2028fred\u2028barney\u2029wilma\u0085betty\u200b").split()
|
||||
ot: ["foo", "bar", u"baz\u2060quux", "fred", "barney", "wilma", u"betty\u200b"]
|
||||
py3.0: r.expr("foo\u00a0bar\u2001baz\u2060quux\u2028fred\u2028barney\u2029wilma\u0085betty\u200b").split()
|
||||
py3.1: r.expr("foo\u00a0bar\u2001baz\u2060quux\u2028fred\u2028barney\u2029wilma\u0085betty\u200b").split()
|
||||
py3.2: r.expr("foo\u00a0bar\u2001baz\u2060quux\u2028fred\u2028barney\u2029wilma\u0085betty\u200b").split()
|
||||
cd: r.expr("foo\u00a0bar\u2001baz\u2060quux\u2028fred\u2028barney\u2029wilma\u0085betty\u200b").split()
|
||||
ot: ["foo", "bar", "baz\u2060quux", "fred", "barney", "wilma", "betty\u200b"]
|
@ -1,14 +0,0 @@
|
||||
desc: These tests test the type of command
|
||||
tests:
|
||||
|
||||
# Method form
|
||||
- cd: r.expr(null).type_of()
|
||||
ot: 'NULL'
|
||||
|
||||
# Prefix form
|
||||
- cd: r.type_of(null)
|
||||
ot: 'NULL'
|
||||
|
||||
# Error cases
|
||||
- js: r(null).typeOf(1)
|
||||
ot: err('ReqlCompileError', 'Expected 1 argument but found 2.', [0])
|
@ -1,20 +0,0 @@
|
||||
desc: Test that UUIDs work
|
||||
tests:
|
||||
- cd: r.uuid()
|
||||
ot: uuid()
|
||||
- cd: r.expr(r.uuid())
|
||||
ot: uuid()
|
||||
- cd: r.type_of(r.uuid())
|
||||
ot: 'STRING'
|
||||
- cd: r.uuid().ne(r.uuid())
|
||||
ot: true
|
||||
- cd: r.uuid('magic')
|
||||
ot: ('97dd10a5-4fc4-554f-86c5-0d2c2e3d5330')
|
||||
- cd: r.uuid('magic').eq(r.uuid('magic'))
|
||||
ot: true
|
||||
- cd: r.uuid('magic').ne(r.uuid('beans'))
|
||||
ot: true
|
||||
- py: r.expr([1,2,3,4,5,6,7,8,9,10]).map(lambda u:r.uuid()).distinct().count()
|
||||
js: r([1,2,3,4,5,6,7,8,9,10]).map(function(u) {return r.uuid();}).distinct().count()
|
||||
rb: r.expr([1,2,3,4,5,6,7,8,9,10]).map {|u| r.uuid()}.distinct().count()
|
||||
ot: 10
|
@ -1,270 +0,0 @@
|
||||
desc: Tests r.default
|
||||
tests:
|
||||
- cd: r.expr(1).default(2)
|
||||
ot: 1
|
||||
- cd: r.expr(null).default(2)
|
||||
ot: 2
|
||||
- cd: r.expr({})['b'].default(2)
|
||||
js: r.expr({})('b').default(2)
|
||||
ot: 2
|
||||
- cd: r.expr(r.expr('a')['b']).default(2)
|
||||
js: r.expr(r.expr('a')('b')).default(2)
|
||||
ot: err("ReqlQueryLogicError", "Cannot perform bracket on a non-object non-sequence `\"a\"`.", [])
|
||||
- rb: r.expr([]).reduce{|a,b| a+b}.default(2)
|
||||
py: r.expr([]).reduce(lambda a,b:a+b).default(2)
|
||||
js: r.expr([]).reduce(function(a,b){return a+b}).default(2)
|
||||
ot: 2
|
||||
- rb: r.expr([]).union([]).reduce{|a,b| a+b}.default(2)
|
||||
py: r.expr([]).union([]).reduce(lambda a,b:a+b).default(2)
|
||||
js: r.expr([]).union([]).reduce(function(a,b){return a+b}).default(2)
|
||||
ot: 2
|
||||
- rb: r.expr('a').reduce{|a,b| a+b}.default(2)
|
||||
py: r.expr('a').reduce(lambda a,b:a+b).default(2)
|
||||
js: r.expr('a').reduce(function(a,b){return a+b}).default(2)
|
||||
ot: err("ReqlQueryLogicError", "Cannot convert STRING to SEQUENCE", [])
|
||||
- cd: (r.expr(null) + 5).default(2)
|
||||
js: (r.expr(null).add(5)).default(2)
|
||||
ot: 2
|
||||
- cd: (5 + r.expr(null)).default(2)
|
||||
js: (r.expr(5).add(null)).default(2)
|
||||
ot: 2
|
||||
- cd: (5 - r.expr(null)).default(2)
|
||||
js: (r.expr(5).sub(null)).default(2)
|
||||
ot: 2
|
||||
- cd: (r.expr(null) - 5).default(2)
|
||||
js: (r.expr(null).sub(5)).default(2)
|
||||
ot: 2
|
||||
- cd: (r.expr('a') + 5).default(2)
|
||||
js: (r.expr('a').add(5)).default(2)
|
||||
ot: err("ReqlQueryLogicError", "Expected type STRING but found NUMBER.", [])
|
||||
- cd: (5 + r.expr('a')).default(2)
|
||||
js: (r.expr(5).add('a')).default(2)
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [])
|
||||
- cd: (r.expr('a') - 5).default(2)
|
||||
js: (r.expr('a').sub(5)).default(2)
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [])
|
||||
- cd: (5 - r.expr('a')).default(2)
|
||||
js: (r.expr(5).sub('a')).default(2)
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [])
|
||||
|
||||
- cd: r.expr(1).default(r.error())
|
||||
ot: 1
|
||||
- cd: r.expr(null).default(r.error())
|
||||
ot: (null)
|
||||
- cd: r.expr({})['b'].default(r.error())
|
||||
js: r.expr({})('b').default(r.error())
|
||||
ot: err("ReqlNonExistenceError", "No attribute `b` in object:", [])
|
||||
- rb: r.expr([]).reduce{|a,b| a+b}.default(r.error)
|
||||
py: r.expr([]).reduce(lambda a,b:a+b).default(r.error)
|
||||
js: r.expr([]).reduce(function(a,b){return a+b}).default(r.error)
|
||||
ot: err("ReqlNonExistenceError", "Cannot reduce over an empty stream.", [])
|
||||
- rb: r.expr([]).union([]).reduce{|a,b| a+b}.default(r.error)
|
||||
py: r.expr([]).union([]).reduce(lambda a,b:a+b).default(r.error)
|
||||
js: r.expr([]).union([]).reduce(function(a,b){return a+b}).default(r.error)
|
||||
ot: err("ReqlNonExistenceError", "Cannot reduce over an empty stream.", [])
|
||||
- cd: (r.expr(null) + 5).default(r.error)
|
||||
js: (r.expr(null).add(5)).default(r.error)
|
||||
ot: err("ReqlNonExistenceError", "Expected type NUMBER but found NULL.", [])
|
||||
- cd: (5 + r.expr(null)).default(r.error)
|
||||
js: (r.expr(5).add(null)).default(r.error)
|
||||
ot: err("ReqlNonExistenceError", "Expected type NUMBER but found NULL.", [])
|
||||
- cd: (5 - r.expr(null)).default(r.error)
|
||||
js: (r.expr(5).sub(null)).default(r.error)
|
||||
ot: err("ReqlNonExistenceError", "Expected type NUMBER but found NULL.", [])
|
||||
- cd: (r.expr(null) - 5).default(r.error)
|
||||
js: (r.expr(null).sub(5)).default(r.error)
|
||||
ot: err("ReqlNonExistenceError", "Expected type NUMBER but found NULL.", [])
|
||||
|
||||
- rb: r.expr(1).default{|e| e}
|
||||
py: r.expr(1).default(lambda e:e)
|
||||
js: r.expr(1).default(function(e){return e})
|
||||
ot: 1
|
||||
- cd: r.expr(null).default{|e| e}
|
||||
py: r.expr(null).default(lambda e:e)
|
||||
js: r.expr(null).default(function(e){return e})
|
||||
ot: (null)
|
||||
- cd: r.expr({})['b'].default{|e| e}
|
||||
py: r.expr({})['b'].default(lambda e:e)
|
||||
js: r.expr({})('b').default(function(e){return e})
|
||||
ot: "No attribute `b` in object:\n{}"
|
||||
- cd: r.expr([]).reduce{|a,b| a+b}.default{|e| e}
|
||||
py: r.expr([]).reduce(lambda a,b:a+b).default(lambda e:e)
|
||||
js: r.expr([]).reduce(function(a,b){return a+b}).default(function(e){return e})
|
||||
ot: ("Cannot reduce over an empty stream.")
|
||||
- cd: r.expr([]).union([]).reduce{|a,b| a+b}.default{|e| e}
|
||||
py: r.expr([]).union([]).reduce(lambda a,b:a+b).default(lambda e:e)
|
||||
js: r.expr([]).union([]).reduce(function(a,b){return a+b}).default(function(e){return e})
|
||||
ot: ("Cannot reduce over an empty stream.")
|
||||
- cd: (r.expr(null) + 5).default{|e| e}
|
||||
py: (r.expr(null) + 5).default(lambda e:e)
|
||||
js: (r.expr(null).add(5)).default(function(e){return e})
|
||||
ot: ("Expected type NUMBER but found NULL.")
|
||||
- cd: (5 + r.expr(null)).default{|e| e}
|
||||
py: (5 + r.expr(null)).default(lambda e:e)
|
||||
js: (r.expr(5).add(null)).default(function(e){return e})
|
||||
ot: ("Expected type NUMBER but found NULL.")
|
||||
- cd: (5 - r.expr(null)).default{|e| e}
|
||||
py: (5 - r.expr(null)).default(lambda e:e)
|
||||
js: (r.expr(5).sub(null)).default(function(e){return e})
|
||||
ot: ("Expected type NUMBER but found NULL.")
|
||||
- cd: (r.expr(null) - 5).default{|e| e}
|
||||
py: (r.expr(null) - 5).default(lambda e:e)
|
||||
js: (r.expr(null).sub(5)).default(function(e){return e})
|
||||
ot: ("Expected type NUMBER but found NULL.")
|
||||
|
||||
- def: arr = r.expr([{'a':1},{'a':null},{}]).order_by('a')
|
||||
|
||||
- cd: arr.filter{|x| x['a'].eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].eq(1))
|
||||
js: arr.filter(function(x){return x('a').eq(1)})
|
||||
ot: [{'a':1}]
|
||||
- cd: arr.filter(:default => false){|x| x['a'].eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].eq(1), default=False)
|
||||
js: arr.filter(function(x){return x('a').eq(1)}, {'default':false})
|
||||
ot: [{'a':1}]
|
||||
- cd: arr.filter(:default => true){|x| x['a'].eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].eq(1), default=True)
|
||||
js: arr.filter(function(x){return x('a').eq(1)}, {'default':true})
|
||||
ot: [{}, {'a':1}]
|
||||
# `null` compares not equal to 1 with no error
|
||||
- cd: arr.filter(:default => r.js('true')){|x| x['a'].eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].eq(1), default=r.js('true'))
|
||||
js: arr.filter(function(x) { return x('a').eq(1) }, { 'default':r.js('true') })
|
||||
ot: [{}, {'a':1}]
|
||||
- cd: arr.filter(:default => r.js('false')){|x| x['a'].eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].eq(1), default=r.js('false'))
|
||||
js: arr.filter(function(x) { return x('a').eq(1) }, { 'default':r.js('false') })
|
||||
ot: [{'a':1}]
|
||||
- cd: arr.filter(:default => r.error){|x| x['a'].eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].eq(1), default=r.error())
|
||||
js: arr.filter(function(x){return x('a').eq(1)}, {'default':r.error()})
|
||||
ot: err("ReqlNonExistenceError", "No attribute `a` in object:", [])
|
||||
|
||||
- cd: r.expr(false).do{|d| arr.filter(:default => d){|x| x['a'].eq(1)}}
|
||||
py: r.expr(False).do(lambda d:arr.filter(lambda x:x['a'].eq(1), default=d))
|
||||
js: r.expr(false).do(function(d){return arr.filter(function(x){return x('a').eq(1)}, {default:d})})
|
||||
ot: [{'a':1}]
|
||||
- cd: r.expr(true).do{|d| arr.filter(:default => d){|x| x['a'].eq(1)}}.orderby('a')
|
||||
py: r.expr(True).do(lambda d:arr.filter(lambda x:x['a'].eq(1), default=d)).order_by('a')
|
||||
js: r.expr(true).do(function(d){return arr.filter(function(x){return x('a').eq(1)}, {default:d})}).orderBy('a')
|
||||
ot: [{}, {'a':1}]
|
||||
# `null` compares not equal to 1 with no error
|
||||
|
||||
- cd: arr.filter{|x| x['a'].default(0).eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].default(0).eq(1))
|
||||
js: arr.filter(function(x){return x('a').default(0).eq(1)})
|
||||
ot: [{'a':1}]
|
||||
- cd: arr.filter{|x| x['a'].default(1).eq(1)}.orderby('a')
|
||||
py: arr.filter(lambda x:x['a'].default(1).eq(1)).order_by('a')
|
||||
js: arr.filter(function(x){return x('a').default(1).eq(1)}).orderBy('a')
|
||||
ot: ([{}, {'a':null}, {'a':1}])
|
||||
- cd: arr.filter{|x| x['a'].default(r.error).eq(1)}
|
||||
py: arr.filter(lambda x:x['a'].default(r.error()).eq(1))
|
||||
js: arr.filter(function(x){return x('a').default(r.error()).eq(1)})
|
||||
ot: [{'a':1}]
|
||||
# gets caught by `filter` default
|
||||
|
||||
- cd: r.expr(0).do{|i| arr.filter{|x| x['a'].default(i).eq(1)}}
|
||||
py: r.expr(0).do(lambda i:arr.filter(lambda x:x['a'].default(i).eq(1)))
|
||||
js: r.expr(0).do(function(i){return arr.filter(function(x){return x('a').default(i).eq(1)})})
|
||||
ot: [{'a':1}]
|
||||
- cd: r.expr(1).do{|i| arr.filter{|x| x['a'].default(i).eq(1)}}.orderby('a')
|
||||
py: r.expr(1).do(lambda i:arr.filter(lambda x:x['a'].default(i).eq(1))).order_by('a')
|
||||
js: r.expr(1).do(function(i){return arr.filter(function(x){return x('a').default(i).eq(1)})}).orderBy('a')
|
||||
ot: ([{},{'a':null},{'a':1}])
|
||||
|
||||
- cd: arr.filter{|x| x['a'].eq(1).or(x['a']['b'].eq(2))}
|
||||
py: arr.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)))
|
||||
js: arr.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))})
|
||||
ot: [{'a':1}]
|
||||
- cd: arr.filter(:default => false){|x| x['a'].eq(1).or(x['a']['b'].eq(2))}
|
||||
py: arr.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)), default=False)
|
||||
js: arr.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))}, {default:false})
|
||||
ot: [{'a':1}]
|
||||
- cd: arr.filter(:default => true){|x| x['a'].eq(1).or(x['a']['b'].eq(2))}.orderby('a')
|
||||
py: arr.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)), default=True).order_by('a')
|
||||
js: arr.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))}, {default:true}).orderBy('a')
|
||||
ot: ([{}, {'a':null}, {'a':1}])
|
||||
- cd: arr.filter(:default => r.error){|x| x['a'].eq(1).or(x['a']['b'].eq(2))}
|
||||
py: arr.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)), default=r.error())
|
||||
js: arr.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))}, {default:r.error()})
|
||||
ot: err("ReqlNonExistenceError", "No attribute `a` in object:", [])
|
||||
|
||||
- cd: r.table_create('default_test')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: r.table('default_test').insert(arr)
|
||||
ot: ({'deleted':0,'replaced':0,'generated_keys':arrlen(3,uuid()),'unchanged':0,'errors':0,'skipped':0,'inserted':3})
|
||||
|
||||
- def: tbl = r.table('default_test').order_by('a').pluck('a')
|
||||
|
||||
- cd: tbl.filter{|x| x['a'].eq(1)}
|
||||
py: tbl.filter(lambda x:x['a'].eq(1))
|
||||
js: tbl.filter(function(x){return x('a').eq(1)})
|
||||
ot: [{'a':1}]
|
||||
- cd: tbl.filter(:default => false){|x| x['a'].eq(1)}
|
||||
py: tbl.filter(lambda x:x['a'].eq(1), default=False)
|
||||
js: tbl.filter(function(x){return x('a').eq(1)}, {'default':false})
|
||||
ot: [{'a':1}]
|
||||
- cd: tbl.filter(:default => true){|x| x['a'].eq(1)}
|
||||
py: tbl.filter(lambda x:x['a'].eq(1), default=True)
|
||||
js: tbl.filter(function(x){return x('a').eq(1)}, {'default':true})
|
||||
ot: [{}, {'a':1}]
|
||||
# `null` compares not equal to 1 with no error
|
||||
- cd: tbl.filter(:default => r.error){|x| x['a'].eq(1)}
|
||||
py: tbl.filter(lambda x:x['a'].eq(1), default=r.error())
|
||||
js: tbl.filter(function(x){return x('a').eq(1)}, {'default':r.error()})
|
||||
ot: err("ReqlNonExistenceError", "No attribute `a` in object:", [])
|
||||
|
||||
- cd: r.expr(false).do{|d| tbl.filter(:default => d){|x| x['a'].eq(1)}}
|
||||
py: r.expr(False).do(lambda d:tbl.filter(lambda x:x['a'].eq(1), default=d))
|
||||
js: r.expr(false).do(function(d){return tbl.filter(function(x){return x('a').eq(1)}, {default:d})})
|
||||
ot: [{'a':1}]
|
||||
- cd: r.expr(true).do{|d| tbl.filter(:default => d){|x| x['a'].eq(1)}}.orderby('a')
|
||||
py: r.expr(True).do(lambda d:tbl.filter(lambda x:x['a'].eq(1), default=d)).order_by('a')
|
||||
js: r.expr(true).do(function(d){return tbl.filter(function(x){return x('a').eq(1)}, {default:d})}).orderBy('a')
|
||||
ot: [{}, {'a':1}]
|
||||
# `null` compares not equal to 1 with no error
|
||||
|
||||
- cd: tbl.filter{|x| x['a'].default(0).eq(1)}
|
||||
py: tbl.filter(lambda x:x['a'].default(0).eq(1))
|
||||
js: tbl.filter(function(x){return x('a').default(0).eq(1)})
|
||||
ot: [{'a':1}]
|
||||
- cd: tbl.filter{|x| x['a'].default(1).eq(1)}.orderby('a')
|
||||
py: tbl.filter(lambda x:x['a'].default(1).eq(1)).order_by('a')
|
||||
js: tbl.filter(function(x){return x('a').default(1).eq(1)}).orderBy('a')
|
||||
ot: ([{}, {'a':null}, {'a':1}])
|
||||
- cd: tbl.filter{|x| x['a'].default(r.error).eq(1)}
|
||||
py: tbl.filter(lambda x:x['a'].default(r.error()).eq(1))
|
||||
js: tbl.filter(function(x){return x('a').default(r.error()).eq(1)})
|
||||
ot: [{'a':1}]
|
||||
# gets caught by `filter` default
|
||||
|
||||
- cd: r.expr(0).do{|i| tbl.filter{|x| x['a'].default(i).eq(1)}}
|
||||
py: r.expr(0).do(lambda i:tbl.filter(lambda x:x['a'].default(i).eq(1)))
|
||||
js: r.expr(0).do(function(i){return tbl.filter(function(x){return x('a').default(i).eq(1)})})
|
||||
ot: [{'a':1}]
|
||||
- cd: r.expr(1).do{|i| tbl.filter{|x| x['a'].default(i).eq(1)}}.orderby('a')
|
||||
py: r.expr(1).do(lambda i:tbl.filter(lambda x:x['a'].default(i).eq(1))).order_by('a')
|
||||
js: r.expr(1).do(function(i){return tbl.filter(function(x){return x('a').default(i).eq(1)})}).orderBy('a')
|
||||
ot: ([{},{'a':null},{'a':1}])
|
||||
|
||||
- cd: tbl.filter{|x| x['a'].eq(1).or(x['a']['b'].eq(2))}
|
||||
py: tbl.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)))
|
||||
js: tbl.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))})
|
||||
ot: [{'a':1}]
|
||||
- cd: tbl.filter(:default => false){|x| x['a'].eq(1).or(x['a']['b'].eq(2))}
|
||||
py: tbl.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)), default=False)
|
||||
js: tbl.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))}, {default:false})
|
||||
ot: [{'a':1}]
|
||||
- cd: tbl.filter(:default => true){|x| x['a'].eq(1).or(x['a']['b'].eq(2))}.orderby('a')
|
||||
py: tbl.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)), default=True).order_by('a')
|
||||
js: tbl.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))}, {default:true}).orderBy('a')
|
||||
ot: ([{}, {'a':null}, {'a':1}])
|
||||
- cd: tbl.filter(:default => r.error){|x| x['a'].eq(1).or(x['a']['b'].eq(2))}
|
||||
py: tbl.filter(lambda x:r.or_(x['a'].eq(1), x['a']['b'].eq(2)), default=r.error())
|
||||
js: tbl.filter(function(x){return x('a').eq(1).or(x('a')('b').eq(2))}, {default:r.error()})
|
||||
ot: err("ReqlNonExistenceError", "No attribute `a` in object:", [])
|
||||
|
||||
- cd: r.table_drop('default_test')
|
||||
ot: partial({'tables_dropped':1})
|
@ -1,64 +0,0 @@
|
||||
desc: Test geo constructors
|
||||
tests:
|
||||
# Point
|
||||
- cd: r.point(0, 0)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[0, 0], 'type':'Point'})
|
||||
- cd: r.point(0, -90)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[0, -90], 'type':'Point'})
|
||||
- cd: r.point(0, 90)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[0, 90], 'type':'Point'})
|
||||
- cd: r.point(-180, 0)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[-180, 0], 'type':'Point'})
|
||||
- cd: r.point(180, 0)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[180, 0], 'type':'Point'})
|
||||
- cd: r.point(0, -91)
|
||||
ot: err('ReqlQueryLogicError', 'Latitude must be between -90 and 90. Got -91.', [0])
|
||||
- cd: r.point(0, 91)
|
||||
ot: err('ReqlQueryLogicError', 'Latitude must be between -90 and 90. Got 91.', [0])
|
||||
- cd: r.point(-181, 0)
|
||||
ot: err('ReqlQueryLogicError', 'Longitude must be between -180 and 180. Got -181.', [0])
|
||||
- cd: r.point(181, 0)
|
||||
ot: err('ReqlQueryLogicError', 'Longitude must be between -180 and 180. Got 181.', [0])
|
||||
|
||||
# Line
|
||||
- cd: r.line()
|
||||
ot: err('ReqlCompileError', 'Expected 2 or more arguments but found 0.', [0])
|
||||
- cd: r.line([0,0])
|
||||
ot: err('ReqlCompileError', 'Expected 2 or more arguments but found 1.', [0])
|
||||
- cd: r.line([0,0], [0,0])
|
||||
ot: err('ReqlQueryLogicError', 'Invalid LineString. Are there antipodal or duplicate vertices?', [0])
|
||||
- cd: r.line([0,0], [0,1])
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[0,0], [0,1]], 'type':'LineString'})
|
||||
- cd: r.line([0,0], [1])
|
||||
ot: err('ReqlQueryLogicError', 'Expected point coordinate pair. Got 1 element array instead of a 2 element one.', [0])
|
||||
- cd: r.line([0,0], [1,0,0])
|
||||
ot: err('ReqlQueryLogicError', 'Expected point coordinate pair. Got 3 element array instead of a 2 element one.', [0])
|
||||
- cd: r.line([0,0], [0,1], [0,0])
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[0,0], [0,1], [0,0]], 'type':'LineString'})
|
||||
- cd: r.line(r.point(0,0), r.point(0,1), r.point(0,0))
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[0,0], [0,1], [0,0]], 'type':'LineString'})
|
||||
- cd: r.line(r.point(0,0), r.point(1,0), r.line([0,0], [1,0]))
|
||||
ot: err('ReqlQueryLogicError', 'Expected geometry of type `Point` but found `LineString`.', [0])
|
||||
|
||||
# Polygon
|
||||
- cd: r.polygon()
|
||||
ot: err('ReqlCompileError', 'Expected 3 or more arguments but found 0.', [0])
|
||||
- cd: r.polygon([0,0])
|
||||
ot: err('ReqlCompileError', 'Expected 3 or more arguments but found 1.', [0])
|
||||
- cd: r.polygon([0,0], [0,0])
|
||||
ot: err('ReqlCompileError', 'Expected 3 or more arguments but found 2.', [0])
|
||||
- cd: r.polygon([0,0], [0,0], [0,0], [0,0])
|
||||
ot: err('ReqlQueryLogicError', 'Invalid LinearRing. Are there antipodal or duplicate vertices? Is it self-intersecting?', [0])
|
||||
- cd: r.polygon([0,0], [0,1], [1,0])
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0,0], [0,1], [1,0], [0,0]]], 'type':'Polygon'})
|
||||
- cd: r.polygon([0,0], [0,1], [1,0], [0,0])
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0,0], [0,1], [1,0], [0,0]]], 'type':'Polygon'})
|
||||
- cd: r.polygon([0,0], [0,1], [1,0], [-1,0.5])
|
||||
ot: err('ReqlQueryLogicError', 'Invalid LinearRing. Are there antipodal or duplicate vertices? Is it self-intersecting?', [0])
|
||||
- cd: r.polygon([0,0], [0,1], [0])
|
||||
ot: err('ReqlQueryLogicError', 'Expected point coordinate pair. Got 1 element array instead of a 2 element one.', [0])
|
||||
- cd: r.polygon([0,0], [0,1], [0,1,0])
|
||||
ot: err('ReqlQueryLogicError', 'Expected point coordinate pair. Got 3 element array instead of a 2 element one.', [0])
|
||||
- cd: r.polygon(r.point(0,0), r.point(0,1), r.line([0,0], [0,1]))
|
||||
ot: err('ReqlQueryLogicError', 'Expected geometry of type `Point` but found `LineString`.', [0])
|
||||
|
@ -1,31 +0,0 @@
|
||||
desc: Test geoJSON conversion
|
||||
tests:
|
||||
# Basic conversion
|
||||
- cd: r.geojson({'coordinates':[0, 0], 'type':'Point'})
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[0, 0], 'type':'Point'})
|
||||
- cd: r.geojson({'coordinates':[[0,0], [0,1]], 'type':'LineString'})
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[0,0], [0,1]], 'type':'LineString'})
|
||||
- cd: r.geojson({'coordinates':[[[0,0], [0,1], [1,0], [0,0]]], 'type':'Polygon'})
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0,0], [0,1], [1,0], [0,0]]], 'type':'Polygon'})
|
||||
|
||||
# Wrong / missing fields
|
||||
- cd: r.geojson({'coordinates':[[], 0], 'type':'Point'})
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found ARRAY.', [0])
|
||||
- cd: r.geojson({'coordinates':true, 'type':'Point'})
|
||||
ot: err('ReqlQueryLogicError', 'Expected type ARRAY but found BOOL.', [0])
|
||||
- cd: r.geojson({'type':'Point'})
|
||||
ot: err('ReqlNonExistenceError', 'No attribute `coordinates` in object:', [0])
|
||||
- cd: r.geojson({'coordinates':[0, 0]})
|
||||
ot: err('ReqlNonExistenceError', 'No attribute `type` in object:', [0])
|
||||
- cd: r.geojson({'coordinates':[0, 0], 'type':'foo'})
|
||||
ot: err('ReqlQueryLogicError', 'Unrecognized GeoJSON type `foo`.', [0])
|
||||
- cd: r.geojson({'coordinates':[0, 0], 'type':'Point', 'foo':'wrong'})
|
||||
ot: err('ReqlQueryLogicError', 'Unrecognized field `foo` found in geometry object.', [0])
|
||||
|
||||
# Unsupported features
|
||||
- cd: r.geojson({'coordinates':[0, 0], 'type':'Point', 'crs':null})
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[0, 0], 'type':'Point', 'crs':null})
|
||||
- js: r.geojson({'coordinates':[0, 0], 'type':'Point', 'crs':{'type':'name', 'properties':{'name':'test'}}})
|
||||
ot: err('ReqlQueryLogicError', 'Non-default coordinate reference systems are not supported in GeoJSON objects. Make sure the `crs` field of the geometry is null or non-existent.', [0])
|
||||
- cd: r.geojson({'coordinates':[0, 0], 'type':'MultiPoint'})
|
||||
ot: err('ReqlQueryLogicError', 'GeoJSON type `MultiPoint` is not supported.', [0])
|
@ -1,208 +0,0 @@
|
||||
desc: Test ReQL interface to geo indexes
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
- def: rows = [{'id':0, 'g':r.point(10,10), 'm':[r.point(0,0),r.point(1,0),r.point(2,0)]},
|
||||
{'id':1, 'g':r.polygon([0,0], [0,1], [1,1], [1,0])},
|
||||
{'id':2, 'g':r.line([0.000002,-1], [-0.000001,1])}]
|
||||
|
||||
- cd: tbl.insert(rows)
|
||||
ot: ({'deleted':0,'inserted':3,'skipped':0,'errors':0,'replaced':0,'unchanged':0})
|
||||
|
||||
- rb: tbl.index_create('g', :geo=>true)
|
||||
py: tbl.index_create('g', geo=true)
|
||||
js: tbl.indexCreate('g', {'geo':true})
|
||||
ot: {'created':1}
|
||||
- rb: tbl.index_create('m', :geo=>true, :multi=>true)
|
||||
py: tbl.index_create('m', geo=true, multi=true)
|
||||
js: tbl.indexCreate('m', {'geo':true, 'multi':true})
|
||||
ot: {'created':1}
|
||||
- cd: tbl.index_create('other')
|
||||
ot: {'created':1}
|
||||
# r.point is deterministic and can be used in an index function
|
||||
- rb: tbl.index_create('point_det'){ |x| r.point(x, x) }
|
||||
py: tbl.index_create('point_det', lambda x: r.point(x, x) )
|
||||
js: tbl.indexCreate('point_det', function(x) {return r.point(x, x);} )
|
||||
ot: {'created':1}
|
||||
|
||||
- cd: tbl.index_wait()
|
||||
|
||||
# r.line (and friends) are non-deterministic across servers and should be disallowed
|
||||
# in index functions
|
||||
- rb: tbl.index_create('point_det'){ |x| r.line(x, x) }
|
||||
py: tbl.index_create('point_det', lambda x: r.line(x, x) )
|
||||
js: tbl.indexCreate('point_det', function(x) {return r.line(x, x);} )
|
||||
ot: err('ReqlQueryLogicError', 'Could not prove function deterministic. Index functions must be deterministic.')
|
||||
|
||||
- js: tbl.get_intersecting(r.point(0,0), {'index':'other'}).count()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='other').count()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'other').count()
|
||||
ot: err('ReqlQueryLogicError', 'Index `other` is not a geospatial index. get_intersecting can only be used with a geospatial index.', [0])
|
||||
- js: tbl.get_intersecting(r.point(0,0), {'index':'missing'}).count()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='missing').count()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'missing').count()
|
||||
ot: err_regex('ReqlOpFailedError', 'Index `missing` was not found on table `[a-zA-Z0-9_]+.[a-zA-Z0-9_]+`[.]', [0])
|
||||
- cd: tbl.get_intersecting(r.point(0,0)).count()
|
||||
ot: err('ReqlQueryLogicError', 'get_intersecting requires an index argument.', [0])
|
||||
- js: tbl.get_all(0, {'index':'g'}).count()
|
||||
py: tbl.get_all(0, index='g').count()
|
||||
rb: tbl.get_all(0, :index=>'g').count()
|
||||
ot: err('ReqlQueryLogicError', 'Index `g` is a geospatial index. Only get_nearest and get_intersecting can use a geospatial index.', [0])
|
||||
- js: tbl.between(0, 1, {'index':'g'}).count()
|
||||
py: tbl.between(0, 1, index='g').count()
|
||||
rb: tbl.between(0, 1, :index=>'g').count()
|
||||
ot: err('ReqlQueryLogicError', 'Index `g` is a geospatial index. Only get_nearest and get_intersecting can use a geospatial index.', [0])
|
||||
- js: tbl.order_by({'index':'g'}).count()
|
||||
py: tbl.order_by(index='g').count()
|
||||
rb: tbl.order_by(:index=>'g').count()
|
||||
ot: err('ReqlQueryLogicError', 'Index `g` is a geospatial index. Only get_nearest and get_intersecting can use a geospatial index.', [0])
|
||||
- js: tbl.between(0, 1).get_intersecting(r.point(0,0), {'index':'g'}).count()
|
||||
py: tbl.between(0, 1).get_intersecting(r.point(0,0), index='g').count()
|
||||
rb: tbl.between(0, 1).get_intersecting(r.point(0,0), :index=>'g').count()
|
||||
ot:
|
||||
cd: err('ReqlQueryLogicError', 'Expected type TABLE but found TABLE_SLICE:', [0])
|
||||
py: err('AttributeError', "'Between' object has no attribute 'get_intersecting'")
|
||||
- js: tbl.get_all(0).get_intersecting(r.point(0,0), {'index':'g'}).count()
|
||||
py: tbl.get_all(0).get_intersecting(r.point(0,0), index='g').count()
|
||||
rb: tbl.get_all(0).get_intersecting(r.point(0,0), :index=>'g').count()
|
||||
ot:
|
||||
cd: err('ReqlQueryLogicError', 'Expected type TABLE but found SELECTION:', [0])
|
||||
py: err('AttributeError', "'GetAll' object has no attribute 'get_intersecting'")
|
||||
- js: tbl.order_by({'index':'id'}).get_intersecting(r.point(0,0), {'index':'g'}).count()
|
||||
py: tbl.order_by(index='id').get_intersecting(r.point(0,0), index='g').count()
|
||||
rb: tbl.order_by(:index=>'id').get_intersecting(r.point(0,0), :index=>'g').count()
|
||||
ot:
|
||||
cd: err('ReqlQueryLogicError', 'Expected type TABLE but found TABLE_SLICE:', [0])
|
||||
py: err('AttributeError', "'OrderBy' object has no attribute 'get_intersecting'")
|
||||
- js: tbl.get_intersecting(r.point(0,0), {'index':'id'}).count()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='id').count()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'id').count()
|
||||
ot: err('ReqlQueryLogicError', 'get_intersecting cannot use the primary index.', [0])
|
||||
|
||||
- js: tbl.get_intersecting(r.point(0,0), {'index':'g'}).count()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='g').count()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'g').count()
|
||||
ot: 1
|
||||
- js: tbl.get_intersecting(r.point(10,10), {'index':'g'}).count()
|
||||
py: tbl.get_intersecting(r.point(10,10), index='g').count()
|
||||
rb: tbl.get_intersecting(r.point(10,10), :index=>'g').count()
|
||||
ot: 1
|
||||
- js: tbl.get_intersecting(r.point(0.5,0.5), {'index':'g'}).count()
|
||||
py: tbl.get_intersecting(r.point(0.5,0.5), index='g').count()
|
||||
rb: tbl.get_intersecting(r.point(0.5,0.5), :index=>'g').count()
|
||||
ot: 1
|
||||
- js: tbl.get_intersecting(r.point(20,20), {'index':'g'}).count()
|
||||
py: tbl.get_intersecting(r.point(20,20), index='g').count()
|
||||
rb: tbl.get_intersecting(r.point(20,20), :index=>'g').count()
|
||||
ot: 0
|
||||
- js: tbl.get_intersecting(r.polygon([0,0], [1,0], [1,1], [0,1]), {'index':'g'}).count()
|
||||
py: tbl.get_intersecting(r.polygon([0,0], [1,0], [1,1], [0,1]), index='g').count()
|
||||
rb: tbl.get_intersecting(r.polygon([0,0], [1,0], [1,1], [0,1]), :index=>'g').count()
|
||||
ot: 2
|
||||
- js: tbl.get_intersecting(r.line([0,0], [10,10]), {'index':'g'}).count()
|
||||
py: tbl.get_intersecting(r.line([0,0], [10,10]), index='g').count()
|
||||
rb: tbl.get_intersecting(r.line([0,0], [10,10]), :index=>'g').count()
|
||||
ot: 3
|
||||
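The preceding tests create a geospatial secondary index with geo=True and query it with get_intersecting. An end-to-end sketch in Python, assuming the table 'tbl' already exists and has geometry values in field 'g':

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    tbl = r.table('tbl')                        # assumed to exist already
    tbl.index_create('g', geo=True).run(conn)   # geospatial index on field 'g' (errors if it already exists)
    tbl.index_wait().run(conn)                  # block until the index is ready
    # Count documents whose geometry intersects the unit square.
    square = r.polygon([0, 0], [1, 0], [1, 1], [0, 1])
    print(tbl.get_intersecting(square, index='g').count().run(conn))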
- js: tbl.get_intersecting(r.point(0,0), {'index':'g'}).type_of()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='g').type_of()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'g').type_of()
|
||||
ot: ("SELECTION<STREAM>")
|
||||
- js: tbl.get_intersecting(r.point(0,0), {'index':'g'}).filter(true).type_of()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='g').filter(true).type_of()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'g').filter(true).type_of()
|
||||
ot: ("SELECTION<STREAM>")
|
||||
- js: tbl.get_intersecting(r.point(0,0), {'index':'g'}).map(r.row).type_of()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='g').map(r.row).type_of()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'g').map{|x|x}.type_of()
|
||||
ot: ("STREAM")
|
||||
|
||||
- js: tbl.get_intersecting(r.point(0,0), {'index':'m'}).count()
|
||||
py: tbl.get_intersecting(r.point(0,0), index='m').count()
|
||||
rb: tbl.get_intersecting(r.point(0,0), :index=>'m').count()
|
||||
ot: 1
|
||||
- js: tbl.get_intersecting(r.point(1,0), {'index':'m'}).count()
|
||||
py: tbl.get_intersecting(r.point(1,0), index='m').count()
|
||||
rb: tbl.get_intersecting(r.point(1,0), :index=>'m').count()
|
||||
ot: 1
|
||||
- js: tbl.get_intersecting(r.point(2,0), {'index':'m'}).count()
|
||||
py: tbl.get_intersecting(r.point(2,0), index='m').count()
|
||||
rb: tbl.get_intersecting(r.point(2,0), :index=>'m').count()
|
||||
ot: 1
|
||||
- js: tbl.get_intersecting(r.point(3,0), {'index':'m'}).count()
|
||||
py: tbl.get_intersecting(r.point(3,0), index='m').count()
|
||||
rb: tbl.get_intersecting(r.point(3,0), :index=>'m').count()
|
||||
ot: 0
|
||||
# The document is emitted once for each match.
|
||||
- js: tbl.get_intersecting(r.polygon([0,0], [0,1], [1,1], [1,0]), {'index':'m'}).count()
|
||||
py: tbl.get_intersecting(r.polygon([0,0], [0,1], [1,1], [1,0]), index='m').count()
|
||||
rb: tbl.get_intersecting(r.polygon([0,0], [0,1], [1,1], [1,0]), :index=>'m').count()
|
||||
ot: 2
|
||||
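With a multi geospatial index such as 'm' above, a document is emitted once per matching array element, so counts can exceed the number of distinct rows. A small sketch of collapsing the duplicates; using distinct() for this is an assumption, it is not exercised by these tests:

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    matches = r.table('tbl').get_intersecting(
        r.polygon([0, 0], [0, 1], [1, 1], [1, 0]), index='m')   # 'm' is the multi geo index above
    print(matches.count().run(conn))             # 2: one document, two matching array elements
    print(matches.distinct().count().run(conn))  # 1: duplicates collapsed (assumption)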
|
||||
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'other'})
|
||||
py: tbl.get_nearest(r.point(0,0), index='other')
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'other')
|
||||
ot: err('ReqlQueryLogicError', 'Index `other` is not a geospatial index. get_nearest can only be used with a geospatial index.', [0])
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'missing'})
|
||||
py: tbl.get_nearest(r.point(0,0), index='missing')
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'missing')
|
||||
ot: err_regex('ReqlOpFailedError', 'Index `missing` was not found on table `[a-zA-Z0-9_]+.[a-zA-Z0-9_]+`[.]', [0])
|
||||
- cd: tbl.get_nearest(r.point(0,0))
|
||||
ot: err('ReqlQueryLogicError', 'get_nearest requires an index argument.', [0])
|
||||
- js: tbl.between(0, 1).get_nearest(r.point(0,0), {'index':'g'}).count()
|
||||
py: tbl.between(0, 1).get_nearest(r.point(0,0), index='g').count()
|
||||
rb: tbl.between(0, 1).get_nearest(r.point(0,0), :index=>'g').count()
|
||||
ot:
|
||||
cd: err('ReqlQueryLogicError', 'Expected type TABLE but found TABLE_SLICE:', [0])
|
||||
py: err('AttributeError', "'Between' object has no attribute 'get_nearest'")
|
||||
- js: tbl.get_all(0).get_nearest(r.point(0,0), {'index':'g'}).count()
|
||||
py: tbl.get_all(0).get_nearest(r.point(0,0), index='g').count()
|
||||
rb: tbl.get_all(0).get_nearest(r.point(0,0), :index=>'g').count()
|
||||
ot:
|
||||
cd: err('ReqlQueryLogicError', 'Expected type TABLE but found SELECTION:', [0])
|
||||
py: err('AttributeError', "'GetAll' object has no attribute 'get_nearest'")
|
||||
- js: tbl.order_by({'index':'id'}).get_nearest(r.point(0,0), {'index':'g'}).count()
|
||||
py: tbl.order_by(index='id').get_nearest(r.point(0,0), index='g').count()
|
||||
rb: tbl.order_by(:index=>'id').get_nearest(r.point(0,0), :index=>'g').count()
|
||||
ot:
|
||||
cd: err('ReqlQueryLogicError', 'Expected type TABLE but found TABLE_SLICE:', [0])
|
||||
py: err('AttributeError', "'OrderBy' object has no attribute 'get_nearest'")
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'id'}).count()
|
||||
py: tbl.get_nearest(r.point(0,0), index='id').count()
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'id').count()
|
||||
ot: err('ReqlQueryLogicError', 'get_nearest cannot use the primary index.', [0])
|
||||
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'g'}).pluck('dist', {'doc':'id'})
|
||||
py: tbl.get_nearest(r.point(0,0), index='g').pluck('dist', {'doc':'id'})
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g').pluck('dist', {'doc':'id'})
|
||||
ot: ([{'dist':0,'doc':{'id':1}},{'dist':0.055659745396754216,'doc':{'id':2}}])
|
||||
- js: tbl.get_nearest(r.point(-0.000001,1), {'index':'g'}).pluck('dist', {'doc':'id'})
|
||||
py: tbl.get_nearest(r.point(-0.000001,1), index='g').pluck('dist', {'doc':'id'})
|
||||
rb: tbl.get_nearest(r.point(-0.000001,1), :index=>'g').pluck('dist', {'doc':'id'})
|
||||
ot: ([{'dist':0,'doc':{'id':2}},{'dist':0.11130264976984369,'doc':{'id':1}}])
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'g', 'max_dist':1565110}).pluck('dist', {'doc':'id'})
|
||||
py: tbl.get_nearest(r.point(0,0), index='g', max_dist=1565110).pluck('dist', {'doc':'id'})
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g', :max_dist=>1565110).pluck('dist', {'doc':'id'})
|
||||
ot: ([{'dist':0,'doc':{'id':1}},{'dist':0.055659745396754216,'doc':{'id':2}},{'dist':1565109.0992178896,'doc':{'id':0}}])
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'g', 'max_dist':1565110, 'max_results':2}).pluck('dist', {'doc':'id'})
|
||||
py: tbl.get_nearest(r.point(0,0), index='g', max_dist=1565110, max_results=2).pluck('dist', {'doc':'id'})
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g', :max_dist=>1565110, :max_results=>2).pluck('dist', {'doc':'id'})
|
||||
ot: ([{'dist':0,'doc':{'id':1}},{'dist':0.055659745396754216,'doc':{'id':2}}])
|
||||
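get_nearest accepts several optional arguments that the tests above exercise one at a time; they compose. A sketch in Python, assuming the 'tbl' table and its geo index 'g' from the setup above:

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    nearest = r.table('tbl').get_nearest(
        r.point(0, 0), index='g',    # geospatial index to search
        max_dist=1566, unit='km',    # cap the search radius, report distances in kilometres
        max_results=2)               # stop after the two closest matches
    print(nearest.pluck('dist', {'doc': 'id'}).run(conn))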
- js: tbl.get_nearest(r.point(0,0), {'index':'g', 'max_dist':10000000}).pluck('dist', {'doc':'id'})
|
||||
py: tbl.get_nearest(r.point(0,0), index='g', max_dist=10000000).pluck('dist', {'doc':'id'})
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g', :max_dist=>10000000).pluck('dist', {'doc':'id'})
|
||||
ot: err('ReqlQueryLogicError', 'The distance has become too large for continuing the indexed nearest traversal. Consider specifying a smaller `max_dist` parameter. (Radius must be smaller than a quarter of the circumference along the minor axis of the reference ellipsoid. Got 10968937.995244588703m, but must be smaller than 9985163.1855612862855m.)', [0])
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'g', 'max_dist':1566, 'unit':'km'}).pluck('dist', {'doc':'id'})
|
||||
py: tbl.get_nearest(r.point(0,0), index='g', max_dist=1566, unit='km').pluck('dist', {'doc':'id'})
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g', :max_dist=>1566, :unit=>'km').pluck('dist', {'doc':'id'})
|
||||
ot: ([{'dist':0,'doc':{'id':1}},{'dist':0.00005565974539675422,'doc':{'id':2}},{'dist':1565.1090992178897,'doc':{'id':0}}])
|
||||
- py: tbl.get_nearest(r.point(0,0), index='g', max_dist=1, geo_system='unit_sphere').pluck('dist', {'doc':'id'})
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g', :max_dist=>1, :geo_system=>'unit_sphere').pluck('dist', {'doc':'id'})
|
||||
ot: ([{'dist':0, 'doc':{'id':1}}, {'dist':8.726646259990191e-09, 'doc':{'id':2}}, {'dist':0.24619691677893205, 'doc':{'id':0}}])
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'g'}).type_of()
|
||||
py: tbl.get_nearest(r.point(0,0), index='g').type_of()
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g').type_of()
|
||||
ot: ("ARRAY")
|
||||
- js: tbl.get_nearest(r.point(0,0), {'index':'g'}).map(r.row).type_of()
|
||||
py: tbl.get_nearest(r.point(0,0), index='g').map(r.row).type_of()
|
||||
rb: tbl.get_nearest(r.point(0,0), :index=>'g').map{|x|x}.type_of()
|
||||
ot: ("ARRAY")
|
@ -1,119 +0,0 @@
|
||||
desc: Test intersects and includes semantics
|
||||
tests:
|
||||
# Intersects
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.point(1.5,1.5))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.point(2.5,2.5))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).intersects(r.point(1.5,1.5))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).intersects(r.point(1.05,1.05))
|
||||
ot: true
|
||||
# Our current semantics: we define polygons as closed, so points that are exactly *on* the outline of a polygon do intersect
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.point(2,2))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.point(2,1.5))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.line([1.5,1.5], [2,2]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.line([1.5,1.5], [2,1.5]))
|
||||
ot: true
|
||||
# (...with holes in the polygon being closed with respect to the polygon, i.e. the set cut out is open)
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).intersects(r.point(1.1,1.1))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).intersects(r.point(1.5,1.1))
|
||||
ot: true
|
||||
# ... lines are interpreted as closed sets as well, so even if they meet only at their end points, we consider them as intersecting.
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.line([2,2], [3,3]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.line([2,1.5], [3,3]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.line([1.5,1.5], [3,3]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.polygon([1.2,1.2], [1.8,1.2], [1.8,1.8], [1.2,1.8]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.polygon([1.5,1.5], [2.5,1.5], [2.5,2.5], [1.5,2.5]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).intersects(r.polygon([1.2,1.2], [1.8,1.2], [1.8,1.8], [1.2,1.8]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).intersects(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9]))
|
||||
ot: false
|
||||
# Polygons behave like lines in that respect
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.polygon([2,1.1], [3,1.1], [3,1.9], [2,1.9]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).intersects(r.polygon([2,2], [3,2], [3,3], [2,3]))
|
||||
ot: false
|
||||
- cd: r.point(1,1).intersects(r.point(1.5,1.5))
|
||||
ot: false
|
||||
- cd: r.point(1,1).intersects(r.point(1,1))
|
||||
ot: true
|
||||
- cd: r.line([1,1], [2,1]).intersects(r.point(1,1))
|
||||
ot: true
|
||||
# This one currently fails due to numeric precision problems.
|
||||
#- cd: r.line([1,0], [2,0]).intersects(r.point(1.5,0))
|
||||
# ot: true
|
||||
- cd: r.line([1,1], [1,2]).intersects(r.point(1,1.8))
|
||||
ot: true
|
||||
- cd: r.line([1,0], [2,0]).intersects(r.point(1.8,0))
|
||||
ot: true
|
||||
- cd: r.line([1,1], [2,1]).intersects(r.point(1.5,1.5))
|
||||
ot: false
|
||||
- cd: r.line([1,1], [2,1]).intersects(r.line([2,1], [3,1]))
|
||||
ot: true
|
||||
# intersects on an array/stream
|
||||
- cd: r.expr([r.point(1, 0), r.point(3,0), r.point(2, 0)]).intersects(r.line([0,0], [2, 0])).count()
|
||||
ot: 2
|
||||
|
||||
# Includes
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.point(1.5,1.5))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.point(2.5,2.5))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).includes(r.point(1.5,1.5))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).includes(r.point(1.05,1.05))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.point(2,2))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.point(2,1.5))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.line([1.5,1.5], [2,2]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.line([1.5,1.5], [2,1.5]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).includes(r.point(1.1,1.1))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).includes(r.point(1.5,1.1))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.line([2,2], [3,3]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.line([2,1.5], [2,2]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.line([2,1], [2,2]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.line([1.5,1.5], [3,3]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.polygon([1,1], [2,1], [2,2], [1,2]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.polygon([1.2,1.2], [1.8,1.2], [1.8,1.8], [1.2,1.8]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.polygon([1.5,1.5], [2,1.5], [2,2], [1.5,2]))
|
||||
ot: true
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.polygon([1.5,1.5], [2.5,1.5], [2.5,2.5], [1.5,2.5]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).includes(r.polygon([1.2,1.2], [1.8,1.2], [1.8,1.8], [1.2,1.8]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).polygon_sub(r.polygon([1.1,1.1], [1.9,1.1], [1.9,1.9], [1.1,1.9])).includes(r.polygon([1.1,1.1], [2,1.1], [2,2], [1.1,2]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.polygon([2,1.1], [3,1.1], [3,1.9], [2,1.9]))
|
||||
ot: false
|
||||
- cd: r.polygon([1,1], [2,1], [2,2], [1,2]).includes(r.polygon([2,2], [3,2], [3,3], [2,3]))
|
||||
ot: false
|
||||
# includes on an array/stream
|
||||
- cd: r.expr([r.polygon([0,0], [1,1], [1,0]), r.polygon([0,1], [1,2], [1,1])]).includes(r.point(0,0)).count()
|
||||
ot: 1
|
||||
# Wrong geometry type arguments (the first one must be a polygon)
|
||||
- cd: r.point(0,0).includes(r.point(0,0))
|
||||
ot: err('ReqlQueryLogicError', 'Expected geometry of type `Polygon` but found `Point`.')
|
||||
- cd: r.line([0,0], [0,1]).includes(r.point(0,0))
|
||||
ot: err('ReqlQueryLogicError', 'Expected geometry of type `Polygon` but found `LineString`.')
|
@ -1,97 +0,0 @@
|
||||
desc: Test basic geometry operators
|
||||
tests:
|
||||
# Distance
|
||||
# coerce_to('STRING') because the test utility has some issues with rounding and I'm too lazy to investigate that now.
|
||||
- cd: r.distance(r.point(-122, 37), r.point(-123, 37)).coerce_to('STRING')
|
||||
ot: ("89011.26253835332")
|
||||
- cd: r.distance(r.point(-122, 37), r.point(-122, 36)).coerce_to('STRING')
|
||||
ot: ("110968.30443995494")
|
||||
- cd: r.distance(r.point(-122, 37), r.point(-122, 36)).eq(r.distance(r.point(-122, 36), r.point(-122, 37)))
|
||||
ot: true
|
||||
- cd: r.point(-122, 37).distance(r.point(-123, 37)).coerce_to('STRING')
|
||||
ot: ("89011.26253835332")
|
||||
- def: someDist = r.distance(r.point(-122, 37), r.point(-123, 37))
|
||||
js: someDist.eq(r.distance(r.point(-122, 37), r.point(-123, 37), {unit:'m'}))
|
||||
py: someDist.eq(r.distance(r.point(-122, 37), r.point(-123, 37), unit='m'))
|
||||
rb: someDist.eq(r.distance(r.point(-122, 37), r.point(-123, 37), :unit=>'m'))
|
||||
ot: true
|
||||
- js: someDist.mul(1.0/1000.0).eq(r.distance(r.point(-122, 37), r.point(-123, 37), {unit:'km'}))
|
||||
py: someDist.mul(1.0/1000.0).eq(r.distance(r.point(-122, 37), r.point(-123, 37), unit='km'))
|
||||
rb: someDist.mul(1.0/1000.0).eq(r.distance(r.point(-122, 37), r.point(-123, 37), :unit=>'km'))
|
||||
ot: true
|
||||
- js: someDist.mul(1.0/1609.344).eq(r.distance(r.point(-122, 37), r.point(-123, 37), {unit:'mi'}))
|
||||
py: someDist.mul(1.0/1609.344).eq(r.distance(r.point(-122, 37), r.point(-123, 37), unit='mi'))
|
||||
rb: someDist.mul(1.0/1609.344).eq(r.distance(r.point(-122, 37), r.point(-123, 37), :unit=>'mi'))
|
||||
ot: true
|
||||
- js: someDist.mul(1.0/0.3048).eq(r.distance(r.point(-122, 37), r.point(-123, 37), {unit:'ft'}))
|
||||
py: someDist.mul(1.0/0.3048).eq(r.distance(r.point(-122, 37), r.point(-123, 37), unit='ft'))
|
||||
rb: someDist.mul(1.0/0.3048).eq(r.distance(r.point(-122, 37), r.point(-123, 37), :unit=>'ft'))
|
||||
ot: true
|
||||
- js: someDist.mul(1.0/1852.0).eq(r.distance(r.point(-122, 37), r.point(-123, 37), {unit:'nm'}))
|
||||
py: someDist.mul(1.0/1852.0).eq(r.distance(r.point(-122, 37), r.point(-123, 37), unit='nm'))
|
||||
rb: someDist.mul(1.0/1852.0).eq(r.distance(r.point(-122, 37), r.point(-123, 37), :unit=>'nm'))
|
||||
ot: true
|
||||
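As the equality tests above show, the unit option is a pure scaling of the default metre result. A worked check with the Python driver, assuming a local server:

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    p1, p2 = r.point(-122, 37), r.point(-123, 37)
    metres = r.distance(p1, p2).run(conn)               # default unit is metres
    km = r.distance(p1, p2, unit='km').run(conn)
    mi = r.distance(p1, p2, unit='mi').run(conn)
    # Exact scalings: 1 km = 1000 m, 1 mi = 1609.344 m, 1 ft = 0.3048 m, 1 nm = 1852 m.
    assert abs(km - metres / 1000.0) < 1e-6
    assert abs(mi - metres / 1609.344) < 1e-6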
- js: someDist.eq(r.distance(r.point(-122, 37), r.point(-123, 37), {'geo_system':'WGS84'}))
|
||||
py: someDist.eq(r.distance(r.point(-122, 37), r.point(-123, 37), geo_system='WGS84'))
|
||||
rb: someDist.eq(r.distance(r.point(-122, 37), r.point(-123, 37), :geo_system=>'WGS84'))
|
||||
ot: true
|
||||
# Mearth is a small planet, just 1/10th of earth's size.
|
||||
- js: someDist.div(10).eq(r.distance(r.point(-122, 37), r.point(-123, 37), {'geo_system':{'a':637813.7, 'f':(1.0/298.257223563)}}))
|
||||
py: someDist.div(10).eq(r.distance(r.point(-122, 37), r.point(-123, 37), geo_system={'a':637813.7, 'f':(1.0/298.257223563)}))
|
||||
rb: someDist.div(10).eq(r.distance(r.point(-122, 37), r.point(-123, 37), :geo_system=>{'a':637813.7, 'f':(1.0/298.257223563)}))
|
||||
ot: true
|
||||
- py: r.distance(r.point(-122, 37), r.point(-123, 37), geo_system='unit_sphere').coerce_to('STRING')
|
||||
rb: r.distance(r.point(-122, 37), r.point(-123, 37), :geo_system=>'unit_sphere').coerce_to('STRING')
|
||||
js: r.distance(r.point(-122, 37), r.point(-123, 37), {'geo_system':'unit_sphere'}).coerce_to('STRING')
|
||||
ot: ("0.01393875509649327")
|
||||
- cd: r.distance(r.point(0, 0), r.point(0, 0)).coerce_to('STRING')
|
||||
ot: ("0")
|
||||
# These two give the earth's circumference through the poles
|
||||
- cd: r.distance(r.point(0, 0), r.point(180, 0)).mul(2).coerce_to('STRING')
|
||||
ot: ("40007862.917250897")
|
||||
- cd: r.distance(r.point(0, -90), r.point(0, 90)).mul(2).coerce_to('STRING')
|
||||
ot: ("40007862.917250897")
|
||||
- cd: r.distance(r.point(0, 0), r.line([0,0], [0,1])).coerce_to('STRING')
|
||||
ot: ("0")
|
||||
- cd: r.distance(r.line([0,0], [0,1]), r.point(0, 0)).coerce_to('STRING')
|
||||
ot: ("0")
|
||||
- cd: r.distance(r.point(0, 0), r.line([0.1,0], [1,0])).eq(r.distance(r.point(0, 0), r.point(0.1, 0)))
|
||||
ot: true
|
||||
- cd: r.distance(r.point(0, 0), r.line([5,-1], [4,2])).coerce_to('STRING')
|
||||
ot: ("492471.4990055255")
|
||||
- cd: r.distance(r.point(0, 0), r.polygon([5,-1], [4,2], [10,10])).coerce_to('STRING')
|
||||
ot: ("492471.4990055255")
|
||||
- cd: r.distance(r.point(0, 0), r.polygon([0,-1], [0,1], [10,10])).coerce_to('STRING')
|
||||
ot: ("0")
|
||||
- cd: r.distance(r.point(0.5, 0.5), r.polygon([0,-1], [0,1], [10,10])).coerce_to('STRING')
|
||||
ot: ("0")
|
||||
|
||||
# Fill
|
||||
- js: r.circle([0,0], 1, {fill:false}).eq(r.circle([0,0], 1, {fill:true}))
|
||||
py: r.circle([0,0], 1, fill=false).eq(r.circle([0,0], 1, fill=true))
|
||||
rb: r.circle([0,0], 1, :fill=>false).eq(r.circle([0,0], 1, :fill=>true))
|
||||
ot: false
|
||||
- js: r.circle([0,0], 1, {fill:false}).fill().eq(r.circle([0,0], 1, {fill:true}))
|
||||
py: r.circle([0,0], 1, fill=false).fill().eq(r.circle([0,0], 1, fill=true))
|
||||
rb: r.circle([0,0], 1, :fill=>false).fill().eq(r.circle([0,0], 1, :fill=>true))
|
||||
ot: true
|
||||
|
||||
# Subtraction
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0.1,0.1], [0.9,0.1], [0.9,0.9], [0.1,0.9]))
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0,0],[1,0],[1,1],[0,1],[0,0]],[[0.1,0.1],[0.9,0.1],[0.9,0.9],[0.1,0.9],[0.1,0.1]]], 'type':'Polygon'})
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0.1,0.9], [0.9,0.0], [0.9,0.9], [0.1,0.9]))
|
||||
ot: err('ReqlQueryLogicError', 'The second argument to `polygon_sub` is not contained in the first one.', [0])
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0,0], [2,0], [2,2], [0,2]))
|
||||
ot: err('ReqlQueryLogicError', 'The second argument to `polygon_sub` is not contained in the first one.', [0])
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0,-2], [1,-2], [-1,1], [0,-1]))
|
||||
ot: err('ReqlQueryLogicError', 'The second argument to `polygon_sub` is not contained in the first one.', [0])
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0,-1], [1,-1], [1,0], [0,0]))
|
||||
ot: err('ReqlQueryLogicError', 'The second argument to `polygon_sub` is not contained in the first one.', [0])
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0.1,-1], [0.9,-1], [0.9,0.5], [0.1,0.5]))
|
||||
ot: err('ReqlQueryLogicError', 'The second argument to `polygon_sub` is not contained in the first one.', [0])
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0,0],[0.1,0.9],[0.9,0.9],[0.9,0.1]))
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0,0],[1,0],[1,1],[0,1],[0,0]],[[0,0],[0.1,0.9],[0.9,0.9],[0.9,0.1],[0,0]]], 'type':'Polygon'})
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.polygon([0,0],[0.1,0.9],[0.9,0.9],[0.9,0.1]).polygon_sub(r.polygon([0.2,0.2],[0.5,0.8],[0.8,0.2])))
|
||||
ot: err('ReqlQueryLogicError', 'Expected a Polygon with only an outer shell. This one has holes.', [0])
|
||||
- cd: r.polygon([0,0], [1,0], [1,1], [0,1]).polygon_sub(r.line([0,0],[0.9,0.1],[0.9,0.9],[0.1,0.9]))
|
||||
ot: err('ReqlQueryLogicError', 'Expected a Polygon but found a LineString.', [])
|
@ -1,50 +0,0 @@
|
||||
desc: Test geometric primitive constructors
|
||||
tests:
|
||||
# Circle
|
||||
- js: r.circle([0,0], 1, {num_vertices:3})
|
||||
py: r.circle([0,0], 1, num_vertices=3)
|
||||
rb: r.circle([0,0], 1, :num_vertices=>3)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0, -9.04369477050382e-06], [-7.779638566553426e-06, 4.5218473852518965e-06], [7.779638566553426e-06, 4.5218473852518965e-06], [0, -9.04369477050382e-06]]], 'type':'Polygon'})
|
||||
|
||||
- js: r.circle(r.point(0,0), 1, {num_vertices:3})
|
||||
py: r.circle(r.point(0,0), 1, num_vertices=3)
|
||||
rb: r.circle(r.point(0,0), 1, :num_vertices=>3)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0, -9.04369477050382e-06], [-7.779638566553426e-06, 4.5218473852518965e-06], [7.779638566553426e-06, 4.5218473852518965e-06], [0, -9.04369477050382e-06]]], 'type':'Polygon'})
|
||||
|
||||
- js: r.circle([0,0], 1, {num_vertices:3, fill:false})
|
||||
py: r.circle([0,0], 1, num_vertices=3, fill=false)
|
||||
rb: r.circle([0,0], 1, :num_vertices=>3, :fill=>false)
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[0, -9.04369477050382e-06], [-7.779638566553426e-06, 4.5218473852518965e-06], [7.779638566553426e-06, 4.5218473852518965e-06], [0, -9.04369477050382e-06]], 'type':'LineString'})
|
||||
|
||||
- js: r.circle([0,0], 14000000, {num_vertices:3})
|
||||
py: r.circle([0,0], 14000000, num_vertices=3)
|
||||
rb: r.circle([0,0], 14000000, :num_vertices=>3)
|
||||
ot: err('ReqlQueryLogicError', 'Radius must be smaller than a quarter of the circumference along the minor axis of the reference ellipsoid. Got 14000000m, but must be smaller than 9985163.1855612862855m.', [0])
|
||||
|
||||
- js: r.circle([0,0], 1, {num_vertices:3, geo_system:'WGS84'})
|
||||
py: r.circle([0,0], 1, num_vertices=3, geo_system='WGS84')
|
||||
rb: r.circle([0,0], 1, :num_vertices=>3, :geo_system=>'WGS84')
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0, -9.04369477050382e-06], [-7.779638566553426e-06, 4.5218473852518965e-06], [7.779638566553426e-06, 4.5218473852518965e-06], [0, -9.04369477050382e-06]]], 'type':'Polygon'})
|
||||
|
||||
- js: r.circle([0,0], 2, {num_vertices:3, geo_system:'unit_'+'sphere'})
|
||||
py: r.circle([0,0], 2, num_vertices=3, geo_system='unit_sphere')
|
||||
rb: r.circle([0,0], 2, :num_vertices=>3, :geo_system=>'unit_sphere')
|
||||
ot: err('ReqlQueryLogicError', 'Radius must be smaller than a quarter of the circumference along the minor axis of the reference ellipsoid. Got 2m, but must be smaller than 1.570796326794896558m.', [0])
|
||||
|
||||
- js: r.circle([0,0], 0.1, {num_vertices:3, geo_system:'unit_'+'sphere'})
|
||||
py: r.circle([0,0], 0.1, num_vertices=3, geo_system='unit_sphere')
|
||||
rb: r.circle([0,0], 0.1, :num_vertices=>3, :geo_system=>'unit_sphere')
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0, -5.729577951308232], [-4.966092947444857, 2.861205754495701], [4.966092947444857, 2.861205754495701], [0, -5.729577951308232]]], 'type':'Polygon'})
|
||||
testopts:
|
||||
precision: 0.0000000000001
|
||||
|
||||
- js: r.circle([0,0], 1.0/1000.0, {num_vertices:3, unit:'km'})
|
||||
py: r.circle([0,0], 1.0/1000.0, num_vertices=3, unit='km')
|
||||
rb: r.circle([0,0], 1.0/1000.0, :num_vertices=>3, :unit=>'km')
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0, -9.04369477050382e-06], [-7.779638566553426e-06, 4.5218473852518965e-06], [7.779638566553426e-06, 4.5218473852518965e-06], [0, -9.04369477050382e-06]]], 'type':'Polygon'})
|
||||
|
||||
- js: r.circle([0,0], 1.0/1609.344, {num_vertices:3, unit:'mi'})
|
||||
py: r.circle([0,0], 1.0/1609.344, num_vertices=3, unit='mi')
|
||||
rb: r.circle([0,0], 1.0/1609.344, :num_vertices=>3, :unit=>'mi')
|
||||
ot: ({'$reql_type$':'GEOMETRY', 'coordinates':[[[0, -9.04369477050382e-06], [-7.779638566553426e-06, 4.5218473852518965e-06], [7.779638566553426e-06, 4.5218473852518965e-06], [0, -9.04369477050382e-06]]], 'type':'Polygon'})
|
||||
|
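The constructor tests above show that r.circle builds a Polygon approximated with num_vertices points, and that fill=False yields just the outline as a LineString. A Python sketch that inspects the result via to_geojson (to_geojson itself is not exercised by these tests):

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    filled = r.circle([0, 0], 1, num_vertices=3)              # radius in metres by default
    outline = r.circle([0, 0], 1, num_vertices=3, fill=False)
    print(filled.to_geojson()['type'].run(conn))    # 'Polygon'
    print(outline.to_geojson()['type'].run(conn))   # 'LineString'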
@ -1,133 +0,0 @@
|
||||
desc: Tests that manipulate data in tables
|
||||
table_variable_name: tbl, tbl2, senders, receivers, messages, otbl, otbl2
|
||||
tests:
|
||||
|
||||
# Setup some more tables
|
||||
|
||||
- py: r.db('test').table_create('test3', primary_key='foo')
|
||||
rb: r.db('test').table_create('test3', {:primary_key=>'foo'})
|
||||
js: r.db('test').tableCreate('test3', {'primaryKey':'foo'})
|
||||
ot: partial({'tables_created':1})
|
||||
- def: tbl3 = r.db('test').table('test3')
|
||||
|
||||
- py: tbl.insert(r.range(0, 100).map({'id':r.row, 'a':r.row % 4}))
|
||||
rb: tbl.insert(r.range(0, 100).map{|row| {'id':row, a:row % 4}})
|
||||
js: tbl.insert(r.range(0, 100).map(function (row) { return {'id':row, 'a':row.mod(4)}; }))
|
||||
ot: partial({'errors':0, 'inserted':100})
|
||||
|
||||
- py: tbl2.insert(r.range(0, 100).map({'id':r.row, 'b':r.row % 4}))
|
||||
rb: tbl2.insert(r.range(0, 100).map{|row| {'id':row, b:row % 4}})
|
||||
js: tbl2.insert(r.range(0, 100).map(function (row) { return {'id':row, 'b':row.mod(4)}; }))
|
||||
ot: partial({'errors':0, 'inserted':100})
|
||||
|
||||
- py: tbl3.insert(r.range(0, 100).map({'foo':r.row, 'b':r.row % 4}))
|
||||
rb: tbl3.insert(r.range(0, 100).map{|row| {'foo':row, b:row % 4}})
|
||||
js: tbl3.insert(r.range(0, 100).map(function (row) { return {'foo':row, 'b':row.mod(4)}; }))
|
||||
ot: partial({'errors':0, 'inserted':100})
|
||||
|
||||
- py: otbl.insert(r.range(1,100).map({'id': r.row, 'a': r.row}))
|
||||
- py: otbl2.insert(r.range(1,100).map({'id': r.row, 'b': 2 * r.row}))
|
||||
|
||||
# Inner-Join
|
||||
|
||||
- def:
|
||||
py: ij = tbl.inner_join(tbl2, lambda x,y:x['a'] == y['b']).zip()
|
||||
js: ij = tbl.innerJoin(tbl2, function(x, y) { return x('a').eq(y('b')); }).zip()
|
||||
rb: ij = tbl.inner_join(tbl2){ |x, y| x[:a].eq y[:b] }.zip
|
||||
- cd: ij.count()
|
||||
ot: 2500
|
||||
- py: ij.filter(lambda row:row['a'] != row['b']).count()
|
||||
js: ij.filter(function(row) { return row('a').ne(row('b')); }).count()
|
||||
rb: ij.filter{ |row| row[:a].ne row[:b] }.count
|
||||
ot: 0
|
||||
|
||||
# Outer-Join
|
||||
- def:
|
||||
py: oj = tbl.outer_join(tbl2, lambda x,y:x['a'] == y['b']).zip()
|
||||
js: oj = tbl.outerJoin(tbl2, function(x, y) { return x('a').eq(y('b')); }).zip()
|
||||
rb: oj = tbl.outer_join(tbl2){ |x, y| x[:a].eq y[:b] }.zip
|
||||
- cd: oj.count()
|
||||
ot: 2500
|
||||
- py: oj.filter(lambda row:row['a'] != row['b']).count()
|
||||
js: oj.filter(function(row) { return row('a').ne(row('b')); }).count()
|
||||
rb: oj.filter{ |row| row[:a].ne row[:b] }.count
|
||||
ot: 0
|
||||
|
||||
# Ordered eq_join
|
||||
- py: blah = otbl.order_by("id").eq_join(r.row['id'], otbl2, ordered=True).zip()
|
||||
ot: [{'id': i, 'a': i, 'b': i * 2} for i in range(1, 100)]
|
||||
- py: blah = otbl.order_by(r.desc("id")).eq_join(r.row['id'], otbl2, ordered=True).zip()
|
||||
ot: [{'id': i, 'a': i, 'b': i * 2} for i in range(99, 0, -1)]
|
||||
- py: blah = otbl.order_by("id").eq_join(r.row['a'], otbl2, ordered=True).zip()
|
||||
ot: [{'id': i, 'a': i, 'b': i * 2} for i in range(1, 100)]
|
||||
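The ordered eq_join tests above rely on the ordered=True flag, which preserves the order of the left sequence in the join output. A Python sketch, assuming the generated tables otbl and otbl2 from the setup at the top of this file:

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    otbl, otbl2 = r.table('otbl'), r.table('otbl2')   # table names are assumptions
    # Join each left row's 'id' against otbl2's primary key; ordered=True keeps the order_by
    # ordering, and zip() merges the 'left'/'right' halves into one document.
    joined = otbl.order_by('id').eq_join(r.row['id'], otbl2, ordered=True).zip()
    print(joined.limit(3).run(conn))   # [{'id': 1, 'a': 1, 'b': 2}, {'id': 2, 'a': 2, 'b': 4}, ...]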
|
||||
# Eq-Join
|
||||
- cd: tbl.eq_join('a', tbl2).zip().count()
|
||||
ot: 100
|
||||
|
||||
- cd: tbl.eq_join('fake', tbl2).zip().count()
|
||||
ot: 0
|
||||
|
||||
- py: tbl.eq_join(lambda x:x['a'], tbl2).zip().count()
|
||||
rb: tbl.eq_join(lambda{|x| x['a']}, tbl2).zip().count()
|
||||
js: tbl.eq_join(function(x) { return x('a'); }, tbl2).zip().count()
|
||||
ot: 100
|
||||
|
||||
- py: tbl.eq_join(lambda x:x['fake'], tbl2).zip().count()
|
||||
rb: tbl.eq_join(lambda{|x| x['fake']}, tbl2).zip().count()
|
||||
js: tbl.eq_join(function(x) { return x('fake'); }, tbl2).zip().count()
|
||||
ot: 0
|
||||
|
||||
- py: tbl.eq_join(lambda x:null, tbl2).zip().count()
|
||||
rb: tbl.eq_join(lambda{|x| null}, tbl2).zip().count()
|
||||
js: tbl.eq_join(function(x) { return null; }, tbl2).zip().count()
|
||||
ot: 0
|
||||
|
||||
- py: tbl.eq_join(lambda x:x['a'], tbl2).count()
|
||||
rb: tbl.eq_join(lambda {|x| x[:a]}, tbl2).count()
|
||||
js: tbl.eq_join(function(x) { return x('a'); }, tbl2).count()
|
||||
ot: 100
|
||||
|
||||
# eqjoin where id isn't a primary key
|
||||
- cd: tbl.eq_join('a', tbl3).zip().count()
|
||||
ot: 100
|
||||
|
||||
- py: tbl.eq_join(lambda x:x['a'], tbl3).count()
|
||||
rb: tbl.eq_join(lambda {|x| x[:a]}, tbl3).count()
|
||||
js: tbl.eq_join(function(x) { return x('a'); }, tbl3).count()
|
||||
ot: 100
|
||||
|
||||
# eq_join with r.row
|
||||
- py: tbl.eq_join(r.row['a'], tbl2).count()
|
||||
js: tbl.eq_join(r.row('a'), tbl2).count()
|
||||
ot: 100
|
||||
|
||||
# test an inner-join condition where inner-join differs from outer-join
|
||||
- def: left = r.expr([{'a':1},{'a':2},{'a':3}])
|
||||
- def: right = r.expr([{'b':2},{'b':3}])
|
||||
|
||||
- py: left.inner_join(right, lambda l, r:l['a'] == r['b']).zip()
|
||||
js: left.innerJoin(right, function(l, r) { return l('a').eq(r('b')); }).zip()
|
||||
rb: left.inner_join(right){ |lt, rt| lt[:a].eq(rt[:b]) }.zip
|
||||
ot: [{'a':2,'b':2},{'a':3,'b':3}]
|
||||
|
||||
# test an outer-join condition where outer-join differs from inner-join
|
||||
- py: left.outer_join(right, lambda l, r:l['a'] == r['b']).zip()
|
||||
js: left.outerJoin(right, function(l, r) { return l('a').eq(r('b')); }).zip()
|
||||
rb: left.outer_join(right){ |lt, rt| lt[:a].eq(rt[:b]) }.zip
|
||||
ot: [{'a':1},{'a':2,'b':2},{'a':3,'b':3}]
|
||||
|
||||
- rb: senders.insert({id:1, sender:'Sender One'})['inserted']
|
||||
ot: 1
|
||||
- rb: receivers.insert({id:1, receiver:'Receiver One'})['inserted']
|
||||
ot: 1
|
||||
- rb: messages.insert([{id:10, sender_id:1, receiver_id:1, msg:'Message One'}, {id:20, sender_id:1, receiver_id:1, msg:'Message Two'}, {id:30, sender_id:1, receiver_id:1, msg:'Message Three'}])['inserted']
|
||||
ot: 3
|
||||
|
||||
- rb: messages.orderby(index:'id').eq_join('sender_id', senders).without({right:{id:true}}).zip.eq_join('receiver_id', receivers).without({right:{id:true}}).zip
|
||||
ot: [{'id':10,'msg':'Message One','receiver':'Receiver One','receiver_id':1,'sender':'Sender One','sender_id':1},{'id':20,'msg':'Message Two','receiver':'Receiver One','receiver_id':1,'sender':'Sender One','sender_id':1},{'id':30,'msg':'Message Three','receiver':'Receiver One','receiver_id':1,'sender':'Sender One','sender_id':1}]
|
||||
|
||||
# Clean up
|
||||
|
||||
- cd: r.db('test').table_drop('test3')
|
||||
ot: partial({'tables_dropped':1})
|
@ -1,74 +0,0 @@
|
||||
desc: Tests RQL json parsing
|
||||
tests:
|
||||
|
||||
- cd: r.json("[1,2,3]")
|
||||
ot: [1,2,3]
|
||||
|
||||
- cd: r.json("1")
|
||||
ot: 1
|
||||
|
||||
- cd: r.json("{}")
|
||||
ot: {}
|
||||
|
||||
- cd: r.json('"foo"')
|
||||
ot: "foo"
|
||||
|
||||
- cd: r.json("[1,2")
|
||||
ot: err("ReqlQueryLogicError", 'Failed to parse "[1,2" as JSON:' + ' Missing a comma or \']\' after an array element.', [0])
|
||||
|
||||
- cd: r.json("[1,2,3]").to_json_string()
|
||||
ot: '[1,2,3]'
|
||||
|
||||
- js: r.json("[1,2,3]").toJSON()
|
||||
py: r.json("[1,2,3]").to_json()
|
||||
ot: '[1,2,3]'
|
||||
|
||||
- cd: r.json("{\"foo\":4}").to_json_string()
|
||||
ot: '{"foo":4}'
|
||||
|
||||
- js: r.json("{\"foo\":4}").toJSON()
|
||||
py: r.json("{\"foo\":4}").to_json()
|
||||
ot: '{"foo":4}'
|
||||
|
||||
# stress test: data is from http://www.mockaroo.com/
|
||||
- def: text = '[{"id":1,"first_name":"Harry","last_name":"Riley","email":"hriley0@usgs.gov","country":"Andorra","ip_address":"221.25.65.136"},{"id":2,"first_name":"Bonnie","last_name":"Anderson","email":"banderson1@list-manage.com","country":"Tuvalu","ip_address":"116.162.43.150"},{"id":3,"first_name":"Marie","last_name":"Schmidt","email":"mschmidt2@diigo.com","country":"Iraq","ip_address":"181.105.59.57"},{"id":4,"first_name":"Phillip","last_name":"Willis","email":"pwillis3@com.com","country":"Montenegro","ip_address":"24.223.139.156"}]'
|
||||
- def: sorted = '[{"country":"Andorra","email":"hriley0@usgs.gov","first_name":"Harry","id":1,"ip_address":"221.25.65.136","last_name":"Riley"},{"country":"Tuvalu","email":"banderson1@list-manage.com","first_name":"Bonnie","id":2,"ip_address":"116.162.43.150","last_name":"Anderson"},{"country":"Iraq","email":"mschmidt2@diigo.com","first_name":"Marie","id":3,"ip_address":"181.105.59.57","last_name":"Schmidt"},{"country":"Montenegro","email":"pwillis3@com.com","first_name":"Phillip","id":4,"ip_address":"24.223.139.156","last_name":"Willis"}]'
|
||||
|
||||
- cd: r.json(text).to_json_string()
|
||||
ot: sorted
|
||||
|
||||
- cd: r.expr(r.minval).to_json_string()
|
||||
ot: err('ReqlQueryLogicError', 'Cannot convert `r.minval` to JSON.')
|
||||
|
||||
- cd: r.expr(r.maxval).to_json_string()
|
||||
ot: err('ReqlQueryLogicError', 'Cannot convert `r.maxval` to JSON.')
|
||||
|
||||
- cd: r.expr(r.minval).coerce_to('string')
|
||||
ot: err('ReqlQueryLogicError', 'Cannot convert `r.minval` to JSON.')
|
||||
|
||||
- cd: r.expr(r.maxval).coerce_to('string')
|
||||
ot: err('ReqlQueryLogicError', 'Cannot convert `r.maxval` to JSON.')
|
||||
|
||||
- cd: r.time(2014,9,11, 'Z')
|
||||
runopts:
|
||||
time_format: 'raw'
|
||||
ot: {'timezone':'+00:00','$reql_type$':'TIME','epoch_time':1410393600}
|
||||
|
||||
- cd: r.time(2014,9,11, 'Z').to_json_string()
|
||||
ot: '{"$reql_type$":"TIME","epoch_time":1410393600,"timezone":"+00:00"}'
|
||||
|
||||
- cd: r.point(0,0)
|
||||
ot: {'$reql_type$':'GEOMETRY','coordinates':[0,0],'type':'Point'}
|
||||
|
||||
- cd: r.point(0,0).to_json_string()
|
||||
ot: '{"$reql_type$":"GEOMETRY","coordinates":[0,0],"type":"Point"}'
|
||||
|
||||
- def:
|
||||
rb: s = "\x66\x6f\x6f".force_encoding('BINARY')
|
||||
py: s = b'\x66\x6f\x6f'
|
||||
js: s = Buffer("\x66\x6f\x6f", 'binary')
|
||||
- cd: r.binary(s)
|
||||
ot: s
|
||||
|
||||
- cd: r.expr("foo").coerce_to("binary").to_json_string()
|
||||
ot: '{"$reql_type$":"BINARY","data":"Zm9v"}'
|
@ -1,128 +0,0 @@
|
||||
desc: Tests array limit variations
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# test simplistic array limits
|
||||
- cd: r.expr([1,1,1,1]).union([1, 1, 1, 1])
|
||||
runopts:
|
||||
array_limit: 8
|
||||
ot: [1,1,1,1,1,1,1,1]
|
||||
- cd: r.expr([1,2,3,4]).union([5, 6, 7, 8])
|
||||
runopts:
|
||||
array_limit: 4
|
||||
ot: err("ReqlResourceLimitError", "Array over size limit `4`.", [0])
|
||||
|
||||
# test array limits on query creation
|
||||
- cd: r.expr([1,2,3,4,5,6,7,8])
|
||||
runopts:
|
||||
array_limit: 4
|
||||
ot: err("ReqlResourceLimitError", "Array over size limit `4`.", [0])
|
||||
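The runopts blocks in these tests are per-query options; with the Python driver the harness applies them as keyword arguments to run(), so the same behaviour can be sketched as below (the exact run() keyword mapping is an assumption):

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    # A generous limit lets the 8-element result through; lowering it makes the
    # identical query fail with ReqlResourceLimitError ("Array over size limit `4`.").
    print(r.expr([1, 2, 3, 4]).union([5, 6, 7, 8]).run(conn, array_limit=8))
    r.expr([1, 2, 3, 4]).union([5, 6, 7, 8]).run(conn, array_limit=4)   # raises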
|
||||
# test bizarre array limits
|
||||
- cd: r.expr([1,2,3,4,5,6,7,8])
|
||||
runopts:
|
||||
array_limit: -1
|
||||
ot: err("ReqlQueryLogicError", "Illegal array size limit `-1`. (Must be >= 1.)", [])
|
||||
|
||||
- cd: r.expr([1,2,3,4,5,6,7,8])
|
||||
runopts:
|
||||
array_limit: 0
|
||||
ot: err("ReqlQueryLogicError", "Illegal array size limit `0`. (Must be >= 1.)", [])
|
||||
|
||||
# make enormous > 100,000 element array
|
||||
- def: ten_l = r.expr([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
|
||||
- def:
|
||||
js: ten_f = function(l) { return ten_l }
|
||||
py: ten_f = lambda l:list(range(1,11))
|
||||
- def:
|
||||
js: huge_l = r.expr(ten_l).concatMap(ten_f).concatMap(ten_f).concatMap(ten_f).concatMap(ten_f)
|
||||
py: huge_l = r.expr(ten_l).concat_map(ten_f).concat_map(ten_f).concat_map(ten_f).concat_map(ten_f)
|
||||
rb: huge_l = r.expr(ten_l).concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}
|
||||
- cd: huge_l.append(1).count()
|
||||
runopts:
|
||||
array_limit: 100001
|
||||
ot: 100001
|
||||
|
||||
# attempt to insert enormous array
|
||||
- cd: tbl.insert({'id':0, 'array':huge_l.append(1)})
|
||||
runopts:
|
||||
array_limit: 100001
|
||||
ot: partial({'errors':1, 'first_error':"Array too large for disk writes (limit 100,000 elements)."})
|
||||
|
||||
- cd: tbl.get(0)
|
||||
runopts:
|
||||
array_limit: 100001
|
||||
ot: (null)
|
||||
|
||||
# attempt to read array that violates limit from disk
|
||||
- cd: tbl.insert({'id':1, 'array':ten_l})
|
||||
ot: ({'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1})
|
||||
- cd: tbl.get(1)
|
||||
runopts:
|
||||
array_limit: 4
|
||||
ot: ({'array':[1,2,3,4,5,6,7,8,9,10],'id':1})
|
||||
|
||||
|
||||
# Test that the changefeed queue size actually causes changes to be sent early.
|
||||
- cd: tbl.delete().get_field('deleted')
|
||||
ot: 1
|
||||
|
||||
- cd: c = tbl.changes({squash:1000000, changefeed_queue_size:10})
|
||||
py: c = tbl.changes(squash=1000000, changefeed_queue_size=10)
|
||||
|
||||
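The point of the next few tests is that changefeed_queue_size forces a squashed feed to flush early: even with a huge squash interval, changes arrive once enough of them are queued. A Python sketch, assuming the table 'tbl':

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    tbl = r.table('tbl')   # table name assumed
    # squash batches changes for up to the given number of seconds, but the server flushes
    # the batch early once roughly changefeed_queue_size changes are queued.
    feed = tbl.changes(squash=1000000, changefeed_queue_size=10).run(conn)
    tbl.insert([{'id': i} for i in range(7)]).run(conn)
    for i, change in enumerate(feed):      # blocks until the batch is flushed
        print(change['new_val'])
        if i == 6:
            break                          # changefeeds never terminate on their own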
- cd: tbl.insert([{'id':0}, {'id':1}, {'id':2}, {'id':3}, {'id':4}, {'id':5}, {'id':6}]).get_field('inserted')
|
||||
ot: 7
|
||||
- py: fetch(c, 7)
|
||||
rb: fetch(c, 7)
|
||||
ot: bag([{'old_val':null, 'new_val':{'id':0}},
|
||||
{'old_val':null, 'new_val':{'id':1}},
|
||||
{'old_val':null, 'new_val':{'id':2}},
|
||||
{'old_val':null, 'new_val':{'id':3}},
|
||||
{'old_val':null, 'new_val':{'id':4}},
|
||||
{'old_val':null, 'new_val':{'id':5}},
|
||||
{'old_val':null, 'new_val':{'id':6}}])
|
||||
|
||||
- cd: tbl.insert([{'id':7}, {'id':8}, {'id':9}, {'id':10}, {'id':11}, {'id':12}, {'id':13}]).get_field('inserted')
|
||||
ot: 7
|
||||
- py: fetch(c, 7)
|
||||
rb: fetch(c, 7)
|
||||
ot: bag([{'old_val':null, 'new_val':{'id':7}},
|
||||
{'old_val':null, 'new_val':{'id':8}},
|
||||
{'old_val':null, 'new_val':{'id':9}},
|
||||
{'old_val':null, 'new_val':{'id':10}},
|
||||
{'old_val':null, 'new_val':{'id':11}},
|
||||
{'old_val':null, 'new_val':{'id':12}},
|
||||
{'old_val':null, 'new_val':{'id':13}}])
|
||||
|
||||
- cd: tbl.delete().get_field('deleted')
|
||||
ot: 14
|
||||
|
||||
- cd: c2 = tbl.changes({squash:1000000})
|
||||
py: c2 = tbl.changes(squash=1000000)
|
||||
runopts:
|
||||
changefeed_queue_size: 10
|
||||
|
||||
|
||||
- cd: tbl.insert([{'id':0}, {'id':1}, {'id':2}, {'id':3}, {'id':4}, {'id':5}, {'id':6}]).get_field('inserted')
|
||||
ot: 7
|
||||
- py: fetch(c2, 7)
|
||||
rb: fetch(c2, 7)
|
||||
ot: bag([{'old_val':null, 'new_val':{'id':0}},
|
||||
{'old_val':null, 'new_val':{'id':1}},
|
||||
{'old_val':null, 'new_val':{'id':2}},
|
||||
{'old_val':null, 'new_val':{'id':3}},
|
||||
{'old_val':null, 'new_val':{'id':4}},
|
||||
{'old_val':null, 'new_val':{'id':5}},
|
||||
{'old_val':null, 'new_val':{'id':6}}])
|
||||
|
||||
- cd: tbl.insert([{'id':7}, {'id':8}, {'id':9}, {'id':10}, {'id':11}, {'id':12}, {'id':13}]).get_field('inserted')
|
||||
ot: 7
|
||||
- py: fetch(c2, 7)
|
||||
rb: fetch(c2, 7)
|
||||
ot: bag([{'old_val':null, 'new_val':{'id':7}},
|
||||
{'old_val':null, 'new_val':{'id':8}},
|
||||
{'old_val':null, 'new_val':{'id':9}},
|
||||
{'old_val':null, 'new_val':{'id':10}},
|
||||
{'old_val':null, 'new_val':{'id':11}},
|
||||
{'old_val':null, 'new_val':{'id':12}},
|
||||
{'old_val':null, 'new_val':{'id':13}}])
|
@ -1,38 +0,0 @@
|
||||
desc: Tests for match
table_variable_name: tbl
tests:
- cd: r.expr("abcdefg").match("a(b.e)|b(c.e)")
  ot: ({'str':'bcde','groups':[null,{'start':2,'str':'cde','end':5}],'start':1,'end':5})
- cd: r.expr("abcdefg").match("a(b.e)|B(c.e)")
  ot: (null)
- cd: r.expr("abcdefg").match("(?i)a(b.e)|B(c.e)")
  ot: ({'str':'bcde','groups':[null,{'start':2,'str':'cde','end':5}],'start':1,'end':5})

- cd: r.expr(["aba", "aca", "ada", "aea"]).filter{|row| row.match("a(.)a")[:groups][0][:str].match("[cd]")}
  py: r.expr(["aba", "aca", "ada", "aea"]).filter(lambda row:row.match("a(.)a")['groups'][0]['str'].match("[cd]"))
  js: r.expr(["aba", "aca", "ada", "aea"]).filter(function(row){return row.match("a(.)a")('groups').nth(0)('str').match("[cd]")})
  ot: (["aca", "ada"])

- cd: tbl.insert([{'id':0,'a':'abc'},{'id':1,'a':'ab'},{'id':2,'a':'bc'}])
  ot: ({'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':3})

- cd: tbl.filter{|row| row['a'].match('b')}.orderby('id')
  py: tbl.filter(lambda row:row['a'].match('b')).order_by('id')
  js: tbl.filter(function(row){return row('a').match('b')}).order_by('id')
  ot: ([{'id':0,'a':'abc'},{'id':1,'a':'ab'},{'id':2,'a':'bc'}])
- cd: tbl.filter{|row| row['a'].match('ab')}.orderby('id')
  py: tbl.filter(lambda row:row['a'].match('ab')).order_by('id')
  js: tbl.filter(function(row){return row('a').match('ab')}).order_by('id')
  ot: ([{'id':0,'a':'abc'},{'id':1,'a':'ab'}])
- cd: tbl.filter{|row| row['a'].match('ab$')}.orderby('id')
  py: tbl.filter(lambda row:row['a'].match('ab$')).order_by('id')
  js: tbl.filter(function(row){return row('a').match('ab$')}).order_by('id')
  ot: ([{'id':1,'a':'ab'}])
- cd: tbl.filter{|row| row['a'].match('^b$')}.orderby('id')
  py: tbl.filter(lambda row:row['a'].match('^b$')).order_by('id')
  js: tbl.filter(function(row){return row('a').match('^b$')}).order_by('id')
  ot: ([])

- cd: r.expr("").match("ab\\9")
  ot: |
    err("ReqlQueryLogicError", "Error in regexp `ab\\9` (portion `\\9`): invalid escape sequence: \\9", [])
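The match tests above return either null (no match) or an object describing the overall span plus one entry per capture group. A small Python sketch of reading that structure, assuming a local server:

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    m = r.expr("abcdefg").match("a(b.e)|b(c.e)").run(conn)
    # {'str': 'bcde', 'start': 1, 'end': 5,
    #  'groups': [None, {'str': 'cde', 'start': 2, 'end': 5}]}  (None for a group that did not participate)
    print(m['groups'][1]['str'])                                # 'cde'
    print(r.expr("abcdefg").match("a(b.e)|B(c.e)").run(conn))   # no match -> None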
@ -1,65 +0,0 @@
|
||||
desc: Tests for basic usage of the add operation
|
||||
tests:
|
||||
- cd: r.add(1, 1)
|
||||
ot: 2
|
||||
|
||||
- js: r(1).add(1)
|
||||
py:
|
||||
- r.expr(1) + 1
|
||||
- 1 + r.expr(1)
|
||||
- r.expr(1).add(1)
|
||||
rb:
|
||||
- r(1) + 1
|
||||
- r(1).add(1)
|
||||
ot: 2
|
||||
|
||||
- py: r.expr(-1) + 1
|
||||
js: r(-1).add(1)
|
||||
rb: (r -1) + 1
|
||||
ot: 0
|
||||
|
||||
- py: r.expr(1.75) + 8.5
|
||||
js: r(1.75).add(8.5)
|
||||
rb: (r 1.75) + 8.5
|
||||
ot: 10.25
|
||||
|
||||
# Add is polymorphic on strings
|
||||
- py: r.expr('') + ''
|
||||
js: r('').add('')
|
||||
rb: (r '') + ''
|
||||
ot: ''
|
||||
|
||||
- py: r.expr('abc') + 'def'
|
||||
js: r('abc').add('def')
|
||||
rb: (r 'abc') + 'def'
|
||||
ot: 'abcdef'
|
||||
|
||||
# Add is polymorphic on arrays
|
||||
- cd: r.expr([1,2]) + [3] + [4,5] + [6,7,8]
|
||||
js: r([1,2]).add([3]).add([4,5]).add([6,7,8])
|
||||
ot: [1,2,3,4,5,6,7,8]
|
||||
|
||||
# All arithmetic operations (except mod) actually support arbitrary arguments
|
||||
# but this feature can't be accessed in Python because its operators are binary
|
||||
- js: r(1).add(2,3,4,5)
|
||||
ot: 15
|
||||
|
||||
- js: r('a').add('b', 'c', 'd')
|
||||
ot: 'abcd'
|
||||
|
||||
# Type errors
|
||||
- cd: r(1).add('a')
|
||||
py: r.expr(1) + 'a'
|
||||
rb: r(1) + 'a'
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [1])
|
||||
|
||||
- cd: r('a').add(1)
|
||||
py: r.expr('a') + 1
|
||||
rb: r('a') + 1
|
||||
ot: err("ReqlQueryLogicError", "Expected type STRING but found NUMBER.", [1])
|
||||
|
||||
- cd: r([]).add(1)
|
||||
py: r.expr([]) + 1
|
||||
rb: r([]) + 1
|
||||
ot: err("ReqlQueryLogicError", "Expected type ARRAY but found NUMBER.", [1])
|
||||
|
@ -1,46 +0,0 @@
|
||||
desc: Test named aliases for math and logic operators
|
||||
tests:
|
||||
|
||||
- cd:
|
||||
- r.expr(0).add(1)
|
||||
- r.add(0, 1)
|
||||
- r.expr(2).sub(1)
|
||||
- r.sub(2, 1)
|
||||
- r.expr(2).div(2)
|
||||
- r.div(2, 2)
|
||||
- r.expr(1).mul(1)
|
||||
- r.mul(1, 1)
|
||||
- r.expr(1).mod(2)
|
||||
- r.mod(1, 2)
|
||||
ot: 1
|
||||
|
||||
- cd:
|
||||
- r.expr(True).and(True)
|
||||
- r.expr(True).or(True)
|
||||
- r.and(True, True)
|
||||
- r.or(True, True)
|
||||
- r.expr(False).not()
|
||||
- r.not(False)
|
||||
py:
|
||||
- r.expr(True).and_(True)
|
||||
- r.expr(True).or_(True)
|
||||
- r.and_(True, True)
|
||||
- r.or_(True, True)
|
||||
- r.expr(False).not_()
|
||||
- r.not_(False)
|
||||
ot: True
|
||||
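The py: variants above carry a trailing underscore because and, or, and not are reserved words in Python, so the Python driver cannot use them as method or function names. A minimal sketch, assuming a local server:

    import rethinkdb as r

    conn = r.connect('localhost', 28015)
    # Python spells the logical operators with a trailing underscore.
    print(r.expr(True).and_(True).run(conn))   # True
    print(r.and_(True, True).run(conn))        # True
    print(r.not_(False).run(conn))             # True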
|
||||
- cd:
|
||||
- r.expr(1).eq(1)
|
||||
- r.expr(1).ne(2)
|
||||
- r.expr(1).lt(2)
|
||||
- r.expr(1).gt(0)
|
||||
- r.expr(1).le(1)
|
||||
- r.expr(1).ge(1)
|
||||
- r.eq(1, 1)
|
||||
- r.ne(1, 2)
|
||||
- r.lt(1, 2)
|
||||
- r.gt(1, 0)
|
||||
- r.le(1, 1)
|
||||
- r.ge(1, 1)
|
||||
ot: True
|
@ -1,477 +0,0 @@
|
||||
desc: Tests of comparison operators
|
||||
tests:
|
||||
|
||||
### Numeric comparisons
|
||||
|
||||
## basic <
|
||||
|
||||
- cd: r(1).lt(2)
|
||||
py:
|
||||
- r.expr(1) < 2
|
||||
- 1 < r.expr(2)
|
||||
- r.expr(1).lt(2)
|
||||
rb:
|
||||
- r(1) < 2
|
||||
- r(1).lt(2)
|
||||
- 1 < r(2)
|
||||
ot: true
|
||||
- cd: r(3).lt(2)
|
||||
py: r.expr(3) < 2
|
||||
rb: r(3) < 2
|
||||
ot: false
|
||||
- py: r.expr(2) < 2
|
||||
js: r(2).lt(2)
|
||||
rb: r(2) < 2
|
||||
ot: false
|
||||
|
||||
# All Comparisons can take an arbitrary number of arguments though
|
||||
# the functionality is only available in JS at the moment
|
||||
- js: r(1).lt(2, 3, 4)
|
||||
ot: true
|
||||
- js: r(1).lt(2, 3, 2)
|
||||
ot: false
|
||||
|
||||
## basic >
|
||||
|
||||
- cd: r(1).gt(2)
|
||||
py:
|
||||
- r.expr(1) > 2
|
||||
- 1 > r.expr(2)
|
||||
- r.expr(1).gt(2)
|
||||
rb:
|
||||
- r(1) > 2
|
||||
- r(1).gt(2)
|
||||
ot: false
|
||||
- py: r.expr(3) > 2
|
||||
js: r(3).gt(2)
|
||||
rb: r(3) > 2
|
||||
ot: true
|
||||
- py: r.expr(2) > 2
|
||||
js: r(2).gt(2)
|
||||
rb: r(2) > 2
|
||||
ot: false
|
||||
|
||||
- js: r(4).gt(3, 2, 1)
|
||||
ot: true
|
||||
- js: r(4).gt(3, 2, 3)
|
||||
ot: false
|
||||
|
||||
## basic ==
|
||||
|
||||
- cd: r(1).eq(2)
|
||||
py:
|
||||
- r.expr(1) == 2
|
||||
- 1 == r.expr(2)
|
||||
- r.expr(1).eq(2)
|
||||
rb: r(1).eq 2
|
||||
ot: false
|
||||
- py: r.expr(3) == 2
|
||||
js: r(3).eq(2)
|
||||
rb: r(3).eq 2
|
||||
ot: false
|
||||
- py: r.expr(2) == 2
|
||||
js: r(2).eq(2)
|
||||
rb: r(2).eq 2
|
||||
ot: true
|
||||
|
||||
- js: r(1).eq(1, 1, 1)
|
||||
ot: true
|
||||
- js: r(1).eq(1, 2, 1)
|
||||
ot: false
|
||||
|
||||
## basic !=
|
||||
|
||||
- cd: r(1).ne(2)
|
||||
py:
|
||||
- r.expr(1) != 2
|
||||
- 1 != r.expr(2)
|
||||
- r.expr(1).ne(2)
|
||||
rb: r(1).ne 2
|
||||
ot: true
|
||||
- py: r.expr(3) != 2
|
||||
js: r(3).ne(2)
|
||||
rb: r(3).ne 2
|
||||
ot: true
|
||||
- py: r.expr(2) != 2
|
||||
js: r(2).ne(2)
|
||||
rb: r(2).ne 2
|
||||
ot: false
|
||||
|
||||
- js: r(1).ne(3, 2, 4)
|
||||
ot: true
|
||||
- js: r(1).ne(3, 2, 3)
|
||||
ot: true
|
||||
|
||||
## basic <=
|
||||
|
||||
- js: r(1).le(2)
|
||||
py:
|
||||
- r.expr(1) <= 2
|
||||
- 1 <= r.expr(2)
|
||||
- r.expr(1).le(2)
|
||||
rb:
|
||||
- r(1) <= 2
|
||||
- r(1).le(2)
|
||||
ot: true
|
||||
- py: r.expr(3) <= 2
|
||||
js: r(3).le(2)
|
||||
rb: r(3) <= 2
|
||||
ot: false
|
||||
- py: r.expr(2) <= 2
|
||||
js: r(2).le(2)
|
||||
rb: r(2) <= 2
|
||||
ot: true
|
||||
|
||||
- js: r(1).le(1, 2, 2)
|
||||
ot: true
|
||||
- js: r(1).le(1, 3, 2)
|
||||
ot: false
|
||||
|
||||
## basic >=
|
||||
|
||||
- cd: r(1).ge(2)
|
||||
py:
|
||||
- r.expr(1) >= 2
|
||||
- 1 >= r.expr(2)
|
||||
- r.expr(1).ge(2)
|
||||
rb:
|
||||
- r(1) >= 2
|
||||
- r(1).ge(2)
|
||||
ot: false
|
||||
- py: r.expr(3) >= 2
|
||||
js: r(3).ge(2)
|
||||
rb: r(3) >= 2
|
||||
ot: true
|
||||
- py: r.expr(2) >= 2
|
||||
js: r(2).ge(2)
|
||||
rb: r(2) >= 2
|
||||
ot: true
|
||||
|
||||
- js: r(4).ge(4, 2, 2)
|
||||
ot: true
|
||||
- js: r(4).ge(4, 2, 3)
|
||||
ot: false
|
||||
|
||||
# Comparisons for NULL
|
||||
- cd: r(null).eq(null)
|
||||
py:
|
||||
- r.expr(null) == null
|
||||
- null == r.expr(null)
|
||||
ot: true
|
||||
|
||||
- cd: r(null).lt(null)
|
||||
py:
|
||||
- r.expr(null) < null
|
||||
- null < r.expr(null)
|
||||
- r.expr(null).lt(null)
|
||||
rb: r(null) < null
|
||||
ot: false
|
||||
|
||||
- cd: r(null).gt(null)
|
||||
py:
|
||||
- r.expr(null) > null
|
||||
- null > r.expr(null)
|
||||
- r.expr(null).gt(null)
|
||||
rb: r(null) > null
|
||||
ot: false
|
||||
|
||||
# Comparisons for STRING
|
||||
# STRING comparison should be lexicographical
|
||||
- py: r.expr('a') == 'a'
|
||||
cd: r('a').eq('a')
|
||||
ot: true
|
||||
|
||||
- py: r.expr('a') == 'aa'
|
||||
cd: r('a').eq('aa')
|
||||
ot: false
|
||||
|
||||
- py: r.expr('a') < 'aa'
|
||||
cd: r('a').lt('aa')
|
||||
ot: true
|
||||
|
||||
- py: r.expr('a') < 'bb'
|
||||
cd: r('a').lt('bb')
|
||||
ot: true
|
||||
|
||||
- py: r.expr('bb') > 'a'
|
||||
cd: r('bb').gt('a')
|
||||
ot: true
|
||||
|
||||
- py: r.expr('abcdef') < 'abcdeg'
|
||||
cd: r('abcdef').lt('abcdeg')
|
||||
ot: true
|
||||
|
||||
- py: r.expr('abcdefg') > 'abcdeg'
|
||||
cd: r('abcdefg').gt('abcdeg')
|
||||
ot: false
|
||||
|
||||
- py: r.expr('A quick brown fox') > 'A quick brawn fox'
|
||||
js: r('A quick brown fox').gt('A quick brawn fox')
|
||||
rb: r('A quick brown fox') > 'A quick brawn fox'
|
||||
ot: true
|
||||
|
||||
# Comparisons for ARRAY
|
||||
# Also lexicographical
|
||||
|
||||
- py: r.expr([1]) < [2]
|
||||
js: r([1]).lt([2])
|
||||
rb: r([1]) < [2]
|
||||
ot: true
|
||||
|
||||
- py: r.expr([1]) > [2]
|
||||
js: r([1]).gt([2])
|
||||
rb: r([1]) > [2]
|
||||
ot: false
|
||||
|
||||
- py: r.expr([1, 0]) < [2]
|
||||
js: r([1, 0]).lt([2])
|
||||
rb: r([1, 0]) < [2]
|
||||
ot: true
|
||||
|
||||
- py: r.expr([1, 0]) < [1]
|
||||
js: r([1, 0]).lt([1])
|
||||
rb: r([1, 0]) < [1]
|
||||
ot: false
|
||||
|
||||
- py: r.expr([1, 0]) > [0]
|
||||
js: r([1, 0]).gt([0])
|
||||
rb: r([1, 0]) > [0]
|
||||
ot: true
|
||||
|
||||
- py: r.expr([1, 'a']) < [1, 'b']
|
||||
js: r([1, 'a']).lt([1, 'b'])
|
||||
rb: r([1, 'a']) < [1, 'b']
|
||||
ot: true
|
||||
|
||||
- py: r.expr([0, 'z']) < [1, 'b']
|
||||
js: r([0, 'z']).lt([1, 'b'])
|
||||
rb: r([0, 'z']) < [1, 'b']
|
||||
ot: true
|
||||
|
||||
- py: r.expr([1, 1, 1]) < [1, 0, 2]
|
||||
js: r([1, 1, 1]).lt([1, 0, 2])
|
||||
rb: r([1, 1, 1]) < [1, 0, 2]
|
||||
ot: false
|
||||
|
||||
- py: r.expr([1, 0, 2]) < [1, 1, 1]
|
||||
js: r([1, 0, 2]).lt([1, 1, 1])
|
||||
rb: r([1, 0, 2]) < [1, 1, 1]
|
||||
ot: true
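
# Illustrative sketch, not part of the original test file: the lexicographic
# array ordering above, via the Python driver (assumes a local server on the
# default port; the connection name is arbitrary).
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

# Arrays compare element by element; a shorter array that is a prefix of a
# longer one sorts first, which is why [1, 0] < [1] is expected to be false.
assert (r.expr([1, 0]) < [2]).run(conn) is True
assert (r.expr([1, 0]) < [1]).run(conn) is False
assert (r.expr([1, 'a']) < [1, 'b']).run(conn) is True
conn.close()
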
|
||||
|
||||
# Comparisons for OBJECT
|
||||
|
||||
- py: r.expr({'a':0}) == {'a':0}
|
||||
cd: r({'a':0}).eq({'a':0})
|
||||
ot: true
|
||||
|
||||
- py: r.expr({'a':0, 'b':1}) == {'b':1, 'a':0}
|
||||
cd: r({'a':0, 'b':1}).eq({'b':1, 'a':0})
|
||||
ot: true
|
||||
|
||||
- py: r.expr({'a':0, 'b':1, 'c':2}) == {'b':1, 'a':0}
|
||||
cd: r({'a':0, 'b':1, 'c':2}).eq({'b':1, 'a':0})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'a':0, 'b':1}) == {'b':1, 'a':0, 'c':2}
|
||||
cd: r({'a':0, 'b':1}).eq({'b':1, 'a':0, 'c':2})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'a':0, 'b':1, 'd':2}) == {'b':1, 'a':0, 'c':2}
|
||||
cd: r({'a':0, 'b':1, 'd':2}).eq({'b':1, 'a':0, 'c':2})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'a':0}) < {'b':0}
|
||||
cd: r({'a':0}).lt({'b':0})
|
||||
ot: true
|
||||
|
||||
- py: r.expr({'a':1}) < {'b':0}
|
||||
cd: r({'a':1}).lt({'b':0})
|
||||
ot: true
|
||||
|
||||
- py: r.expr({'b':1}) < {'b':0}
|
||||
cd: r({'b':1}).lt({'b':0})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'b':1}) < {'a':0}
|
||||
cd: r({'b':1}).lt({'a':0})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'a':0, 'b':1, 'c':2}) < {'a':0, 'b':1, 'c':2}
|
||||
cd: r({'a':0, 'b':1, 'c':2}).lt({'a':0, 'b':1, 'c':2})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'a':0, 'b':1, 'c':2, 'd':3}) < {'a':0, 'b':1, 'c':2}
|
||||
cd: r({'a':0, 'b':1, 'c':2, 'd':3}).lt({'a':0, 'b':1, 'c':2})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'a':0, 'b':1, 'c':2}) < {'a':0, 'b':1, 'c':2, 'd':3}
|
||||
cd: r({'a':0, 'b':1, 'c':2}).lt({'a':0, 'b':1, 'c':2, 'd':3})
|
||||
ot: true
|
||||
|
||||
- py: r.expr({'a':0, 'c':2}) < {'a':0, 'b':1, 'c':2}
|
||||
cd: r({'a':0, 'c':2}).lt({'a':0, 'b':1, 'c':2})
|
||||
ot: false
|
||||
|
||||
- py: r.expr({'a':0, 'c':2}) > {'a':0, 'b':1, 'c':2}
|
||||
cd: r({'a':0, 'c':2}).gt({'a':0, 'b':1, 'c':2})
|
||||
ot: true
|
||||
|
||||
# Comparisons across types
|
||||
# ReQL primitive types have a fixed relative order; the expected results
# below assume, from smallest to largest:
#
#   MINVAL < ARRAY < BOOLEAN < NULL < NUMBER < OBJECT < BINARY < TIME < STRING < MAXVAL
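
# Illustrative sketch, not part of the original test file: the cross-type
# comparisons above, asserted pairwise with the Python driver. Only relations
# the tests below actually pin down are checked; assumes a local server on
# the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

checks = [
    r.expr([]).lt(True),            # ARRAY   < BOOLEAN
    r.expr(True).lt(None),          # BOOLEAN < NULL
    r.expr(None).lt(12),            # NULL    < NUMBER
    r.expr(-12).lt({}),             # NUMBER  < OBJECT
    r.binary(b"\x00").lt("abc"),    # BINARY  < STRING
    r.now().gt(12),                 # TIME    > NUMBER
    r.lt(r.minval, []),             # MINVAL sorts below everything
    r.gt(r.maxval, "zzz"),          # MAXVAL sorts above everything
]
assert all(c.run(conn) is True for c in checks)
conn.close()
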
|
||||
- def:
|
||||
py: everything = r.expr([[],r.now(),r.binary(b"\x00"),false,null,-5,{},"a",r.maxval])
|
||||
js: everything = r.expr([[],r.now(),r.binary(Buffer("\x00")),false,null,-5,{},"a",r.maxval])
|
||||
rb: everything = r.expr([[],r.now(),r.binary("\x00"),false,null,-5,{},"a",r.maxval])
|
||||
|
||||
- js: r.and(r.args(everything.map(r.lt(r.minval, r.row))))
|
||||
py: r.and_(r.args(everything.map(r.lt(r.minval, r.row))))
|
||||
rb: r.and(r.args(everything.map{|x| r.lt(r.minval, x)}))
|
||||
ot: true
|
||||
|
||||
- js: r.or(r.args(everything.map(r.gt(r.minval, r.row))))
|
||||
py: r.or_(r.args(everything.map(r.gt(r.minval, r.row))))
|
||||
rb: r.or(r.args(everything.map{|x| r.gt(r.minval, x)}))
|
||||
ot: false
|
||||
|
||||
- cd: r.eq(r.minval, r.minval)
|
||||
ot: true
|
||||
|
||||
- py: r.expr([]) < True
|
||||
js: r([]).lt(true)
|
||||
rb: r([]) < true
|
||||
ot: true
|
||||
|
||||
- py: r.expr([1,2]) < False
|
||||
js: r([1,2]).lt(false)
|
||||
rb: r([1,2]) < false
|
||||
ot: true
|
||||
|
||||
- py: r.expr(False) < []
|
||||
js: r(false).lt([])
|
||||
rb: r(false) < []
|
||||
ot: false
|
||||
|
||||
- py: r.expr([]) < r.binary(b"\xAE")
|
||||
js: r([]).lt(r.binary(Buffer("\x00")))
|
||||
rb: r([]) < r.binary("")
|
||||
ot: true
|
||||
|
||||
- py: r.expr([1,2]) < r.binary(b"\xAE")
|
||||
js: r([1,2]).lt(r.binary(Buffer("\x00")))
|
||||
rb: r([1,2]) < r.binary("")
|
||||
ot: true
|
||||
|
||||
- py: True < r.expr(null)
|
||||
js: r(true).lt(null)
|
||||
rb: r(true) < null
|
||||
ot: true
|
||||
|
||||
- py: r.expr(null) > []
|
||||
js: r(null).gt([])
|
||||
rb: r(null) > []
|
||||
ot: true
|
||||
|
||||
- py: r.expr(null) < 12
|
||||
js: r(null).lt(12)
|
||||
rb: r(null) < 12
|
||||
ot: true
|
||||
|
||||
- py: r.expr(null) < -2
|
||||
js: r(null).lt(-2)
|
||||
rb: r(null) < -2
|
||||
ot: true
|
||||
|
||||
- py: r.expr(-12) < {}
|
||||
js: r(-12).lt({})
|
||||
rb: r(-12) < {}
|
||||
ot: true
|
||||
|
||||
- py: r.expr(100) < {'a':-12}
|
||||
js: r(100).lt({a:-12})
|
||||
rb: r(100) < { :a => -12 }
|
||||
ot: true
|
||||
|
||||
- py: r.expr(r.binary(b"\xAE")) < 12
|
||||
js: r(r.binary(Buffer("\x00"))).lt(12)
|
||||
rb: r(r.binary("")) < 12
|
||||
ot: false
|
||||
|
||||
- py: r.binary(b"0xAE") < 'abc'
|
||||
js: r.binary(Buffer("0x00")).lt('abc')
|
||||
rb: r.binary("") < 'abc'
|
||||
ot: true
|
||||
|
||||
- py: r.binary(b"0xAE") > r.now()
|
||||
js: r.binary(Buffer("0x00")).gt(r.now())
|
||||
rb: r.binary("") > r.now()
|
||||
ot: false
|
||||
|
||||
- cd: r.now() > 12
|
||||
js: r.now().gt(12)
|
||||
ot: true
|
||||
|
||||
- cd: r.now() > 'abc'
|
||||
js: r.now().gt('abc')
|
||||
ot: false
|
||||
|
||||
- py: r.expr("abc") > {'a':-12}
|
||||
js: r('abc').gt({a:-12})
|
||||
rb: r('abc') > { :a => -12 }
|
||||
ot: true
|
||||
|
||||
- py: r.expr("abc") > {'abc':'abc'}
|
||||
js: r('abc').gt({abc:'abc'})
|
||||
rb: r('abc') > { :abc => 'abc' }
|
||||
ot: true
|
||||
|
||||
- py: r.expr('zzz') > 128
|
||||
js: r('zzz').gt(128)
|
||||
rb: r('zzz') > 128
|
||||
ot: true
|
||||
|
||||
- py: r.expr('zzz') > {}
|
||||
js: r('zzz').gt({})
|
||||
rb: r('zzz') > {}
|
||||
ot: true
|
||||
|
||||
- py: 'zzz' > r.expr(-152)
|
||||
js: r('zzz').gt(-152)
|
||||
rb: r('zzz') > -152
|
||||
ot: true
|
||||
|
||||
- py: 'zzz' > r.expr(null)
|
||||
js: r('zzz').gt(null)
|
||||
rb: r('zzz') > null
|
||||
ot: true
|
||||
|
||||
- py: 'zzz' > r.expr([])
|
||||
js: r('zzz').gt([])
|
||||
rb: r('zzz') > []
|
||||
ot: true
|
||||
|
||||
- def:
|
||||
rb: everything2 = r.expr([r.minval,[],r.now(),r.binary("\x00"),false,null,-5,{},"a"])
|
||||
py: everything2 = r.expr([r.minval,[],r.now(),r.binary(b"\x00"),false,null,-5,{},"a"])
|
||||
js: everything2 = r.expr([r.minval,[],r.now(),r.binary(Buffer("\x00")),false,null,-5,{},"a"])
|
||||
|
||||
- js: r.and(r.args(everything2.map(r.gt(r.maxval, r.row))))
|
||||
py: r.and_(r.args(everything2.map(r.gt(r.maxval, r.row))))
|
||||
rb: r.and(r.args(everything2.map{|x| r.gt(r.maxval, x)}))
|
||||
ot: true
|
||||
|
||||
- js: r.or(r.args(everything2.map(r.lt(r.maxval, r.row))))
|
||||
py: r.or_(r.args(everything2.map(r.lt(r.maxval, r.row))))
|
||||
rb: r.or(r.args(everything2.map{|x| r.lt(r.maxval, x)}))
|
||||
ot: false
|
||||
|
||||
- cd: r.eq(r.maxval, r.maxval)
|
||||
ot: true
|
@ -1,52 +0,0 @@
|
||||
desc: Tests for the basic usage of the division operation
|
||||
tests:
|
||||
|
||||
- cd: r(4).div(2)
|
||||
py:
|
||||
- r.expr(4) / 2
|
||||
- 4 / r.expr(2)
|
||||
- r.expr(4).div(2)
|
||||
rb:
|
||||
- (r 4) / 2
|
||||
- r(4).div 2
|
||||
- 4 / r(2)
|
||||
ot: 2
|
||||
|
||||
- py: r.expr(-1) / -2
|
||||
js: r(-1).div(-2)
|
||||
rb: (r -1) / -2
|
||||
ot: 0.5
|
||||
|
||||
- py: r.expr(4.9) / 0.7
|
||||
js: r(4.9).div(0.7)
|
||||
rb: (r 4.9) / 0.7
|
||||
ot: 4.9 / 0.7
|
||||
|
||||
- cd: r.expr(1).div(2,3,4,5)
|
||||
ot: 1.0/120
|
||||
|
||||
# Divide by zero test
|
||||
- cd:
|
||||
- r(1).div(0)
|
||||
- r(2.0).div(0)
|
||||
- r(3).div(0.0)
|
||||
- r(4.0).div(0.0)
|
||||
- r(0).div(0)
|
||||
- r(0.0).div(0.0)
|
||||
py:
|
||||
- r.expr(1) / 0
|
||||
- r.expr(2.0) / 0
|
||||
- r.expr(3) / 0.0
|
||||
- r.expr(4.0) / 0.0
|
||||
- r.expr(0) / 0
|
||||
- r.expr(0.0) / 0.0
|
||||
ot: err('ReqlQueryLogicError', 'Cannot divide by zero.', [1])
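
# Illustrative sketch, not part of the original test file: basic division,
# the left-fold of multi-argument div, and the divide-by-zero error above,
# via the Python driver (assumes a local server on the default port).
from rethinkdb import RethinkDB
from rethinkdb.errors import ReqlQueryLogicError

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

assert (r.expr(4) / 2).run(conn) == 2
# div with several arguments folds left: 1 / 2 / 3 / 4 / 5
assert abs(r.expr(1).div(2, 3, 4, 5).run(conn) - 1.0 / 120) < 1e-12
try:
    (r.expr(1) / 0).run(conn)
except ReqlQueryLogicError as e:
    print("server rejected the query:", e)
conn.close()
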
|
||||
|
||||
# Type errors
|
||||
- py: r.expr('a') / 0.8
|
||||
cd: r('a').div(0.8)
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [0])
|
||||
|
||||
- py: r.expr(1) / 'a'
|
||||
cd: r(1).div('a')
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [1])
|
@ -1,114 +0,0 @@
|
||||
desc: tests for `floor`, `ceil`, and `round`, tests inspired by the Python test suite
|
||||
tests:
|
||||
- cd: r.floor(1.0).type_of()
|
||||
ot: "NUMBER"
|
||||
- cd: r.floor(1.0)
|
||||
ot: 1.0
|
||||
- cd: r.expr(1.0).floor()
|
||||
ot: 1.0
|
||||
|
||||
- cd: r.floor(0.5)
|
||||
ot: 0.0
|
||||
- cd: r.floor(1.0)
|
||||
ot: 1.0
|
||||
- cd: r.floor(1.5)
|
||||
ot: 1.0
|
||||
- cd: r.floor(-0.5)
|
||||
ot: -1.0
|
||||
- cd: r.floor(-1.0)
|
||||
ot: -1.0
|
||||
- cd: r.floor(-1.5)
|
||||
ot: -2.0
|
||||
|
||||
- cd: r.expr('X').floor()
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [])
|
||||
|
||||
|
||||
- cd: r.ceil(1.0).type_of()
|
||||
ot: "NUMBER"
|
||||
- cd: r.ceil(1.0)
|
||||
ot: 1.0
|
||||
- cd: r.expr(1.0).ceil()
|
||||
ot: 1.0
|
||||
|
||||
- cd: r.ceil(0.5)
|
||||
ot: 1.0
|
||||
- cd: r.ceil(1.0)
|
||||
ot: 1.0
|
||||
- cd: r.ceil(1.5)
|
||||
ot: 2.0
|
||||
- cd: r.ceil(-0.5)
|
||||
ot: 0.0
|
||||
- cd: r.ceil(-1.0)
|
||||
ot: -1.0
|
||||
- cd: r.ceil(-1.5)
|
||||
ot: -1.0
|
||||
|
||||
- cd: r.expr('X').ceil()
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [])
|
||||
|
||||
|
||||
- cd: r.round(1.0).type_of()
|
||||
ot: "NUMBER"
|
||||
- cd: r.round(1.0)
|
||||
ot: 1.0
|
||||
- cd: r.expr(1.0).round()
|
||||
ot: 1.0
|
||||
|
||||
- cd: r.round(0.5)
|
||||
ot: 1.0
|
||||
- cd: r.round(-0.5)
|
||||
ot: -1.0
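
# Illustrative sketch, not part of the original test file: ReQL rounds
# halfway cases away from zero, unlike Python 3's built-in round(), which
# rounds halves to the nearest even integer (assumes a local server on the
# default port).
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

assert r.round(0.5).run(conn) == 1.0
assert r.round(-0.5).run(conn) == -1.0
assert round(0.5) == 0 and round(-0.5) == 0   # Python 3 banker's rounding
assert r.floor(-1.5).run(conn) == -2.0
assert r.ceil(-1.5).run(conn) == -1.0
conn.close()
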
|
||||
|
||||
- cd: r.round(0.0)
|
||||
ot: 0.0
|
||||
- cd: r.round(1.0)
|
||||
ot: 1.0
|
||||
- cd: r.round(10.0)
|
||||
ot: 10.0
|
||||
- cd: r.round(1000000000.0)
|
||||
ot: 1000000000.0
|
||||
- cd: r.round(1e20)
|
||||
ot: 1e20
|
||||
|
||||
- cd: r.round(-1.0)
|
||||
ot: -1.0
|
||||
- cd: r.round(-10.0)
|
||||
ot: -10.0
|
||||
- cd: r.round(-1000000000.0)
|
||||
ot: -1000000000.0
|
||||
- cd: r.round(-1e20)
|
||||
ot: -1e20
|
||||
|
||||
- cd: r.round(0.1)
|
||||
ot: 0.0
|
||||
- cd: r.round(1.1)
|
||||
ot: 1.0
|
||||
- cd: r.round(10.1)
|
||||
ot: 10.0
|
||||
- cd: r.round(1000000000.1)
|
||||
ot: 1000000000.0
|
||||
|
||||
- cd: r.round(-1.1)
|
||||
ot: -1.0
|
||||
- cd: r.round(-10.1)
|
||||
ot: -10.0
|
||||
- cd: r.round(-1000000000.1)
|
||||
ot: -1000000000.0
|
||||
|
||||
- cd: r.round(0.9)
|
||||
ot: 1.0
|
||||
- cd: r.round(9.9)
|
||||
ot: 10.0
|
||||
- cd: r.round(999999999.9)
|
||||
ot: 1000000000.0
|
||||
|
||||
- cd: r.round(-0.9)
|
||||
ot: -1.0
|
||||
- cd: r.round(-9.9)
|
||||
ot: -10.0
|
||||
- cd: r.round(-999999999.9)
|
||||
ot: -1000000000.0
|
||||
|
||||
- cd: r.expr('X').round()
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [])
|
@ -1,169 +0,0 @@
|
||||
desc: These tests are aimed at &&, ||, and !
|
||||
tests:
|
||||
|
||||
## basic operator usage
|
||||
|
||||
# Python overloads '&' for 'and'
|
||||
- py:
|
||||
- r.expr(true) & true
|
||||
- true & r.expr(true)
|
||||
- r.and_(true,true)
|
||||
- r.expr(true).and_(true)
|
||||
rb:
|
||||
- r(true) & true
|
||||
- r(true) & r(true)
|
||||
- r.and(true,true)
|
||||
- r(true).and(true)
|
||||
js:
|
||||
- r.and(true,true)
|
||||
- r(true).and(true)
|
||||
ot: true
|
||||
- py:
|
||||
- r.expr(true) & false
|
||||
- r.expr(false) & false
|
||||
- true & r.expr(false)
|
||||
- false & r.expr(false)
|
||||
- r.and_(true,false)
|
||||
- r.and_(false,false)
|
||||
- r.expr(true).and_(false)
|
||||
- r.expr(false).and_(false)
|
||||
rb:
|
||||
- r(true) & false
|
||||
- r(false) & false
|
||||
- r(true) & r(false)
|
||||
- r(false) & r(false)
|
||||
- r.and(true,false)
|
||||
- r.and(false,false)
|
||||
- r(true).and(false)
|
||||
- r(false).and(false)
|
||||
js:
|
||||
- r.and(true,false)
|
||||
- r.and(false,false)
|
||||
- r(true).and(false)
|
||||
- r(false).and(false)
|
||||
ot: false
|
||||
|
||||
# Python overloads '|' for 'or'
|
||||
- py:
|
||||
- r.expr(true) | true
|
||||
- r.expr(true) | false
|
||||
- true | r.expr(true)
|
||||
- true | r.expr(false)
|
||||
- r.or_(true,true)
|
||||
- r.or_(true,false)
|
||||
- r.expr(true).or_(true)
|
||||
- r.expr(true).or_(false)
|
||||
rb:
|
||||
- r(true) | true
|
||||
- r(true) | false
|
||||
- r(true) | r(true)
|
||||
- r(true) | r(false)
|
||||
- r.or(true,true)
|
||||
- r.or(true,false)
|
||||
- r(true).or(true)
|
||||
- r(true).or(false)
|
||||
js:
|
||||
- r.or(true,true)
|
||||
- r.or(true,false)
|
||||
- r(true).or(true)
|
||||
- r(true).or(false)
|
||||
ot: true
|
||||
- py:
|
||||
- r.expr(false) | false
|
||||
- false | r.expr(false)
|
||||
- r.and_(false,false)
|
||||
- r.expr(false).and_(false)
|
||||
rb:
|
||||
- r(false) | false
|
||||
- r(false) | r(false)
|
||||
- r.and(false,false)
|
||||
- r(false).and(false)
|
||||
js:
|
||||
- r.and(false,false)
|
||||
- r(false).and(false)
|
||||
ot: false
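
# Illustrative sketch, not part of the original test file: the Python
# driver's '&' and '|' overloads and the explicit r.and_/r.or_ forms used
# above (the trailing underscores avoid clashing with Python keywords).
# Assumes a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

assert (r.expr(True) & False).run(conn) is False
assert (r.expr(True) | False).run(conn) is True
assert r.and_(True, True).run(conn) is True
assert r.expr(False).or_(False).run(conn) is False
conn.close()
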
|
||||
|
||||
# Python overloads '~' for 'not'
|
||||
- py:
|
||||
- ~r.expr(True)
|
||||
- r.not_(True)
|
||||
cd: r(true).not()
|
||||
ot: false
|
||||
- py:
|
||||
- ~r.expr(False)
|
||||
- r.not_(False)
|
||||
cd: r(false).not()
|
||||
ot: true
|
||||
- py: r.expr(True).not_()
|
||||
cd: r(true).not()
|
||||
ot: false
|
||||
- py: r.expr(False).not_()
|
||||
cd: r(false).not()
|
||||
ot: true
|
||||
|
||||
## DeMorgan's rules!
|
||||
|
||||
- py:
|
||||
- ~r.and_(True, True) == r.or_(~r.expr(True), ~r.expr(True))
|
||||
- ~r.and_(True, False) == r.or_(~r.expr(True), ~r.expr(False))
|
||||
- ~r.and_(False, False) == r.or_(~r.expr(False), ~r.expr(False))
|
||||
- ~r.and_(False, True) == r.or_(~r.expr(False), ~r.expr(True))
|
||||
cd:
|
||||
- r(true).and(true).not().eq(r(true).not().or(r(true).not()))
|
||||
- r(true).and(false).not().eq(r(true).not().or(r(false).not()))
|
||||
- r(false).and(false).not().eq(r(false).not().or(r(false).not()))
|
||||
- r(false).and(true).not().eq(r(false).not().or(r(true).not()))
|
||||
ot: true
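
# Illustrative sketch, not part of the original test file: De Morgan's rule
# not (a and b) == (not a) or (not b), checked for every boolean pair with
# the '~', '&'-style overloads used above (assumes a local server on the
# default port).
from itertools import product

from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

for a, b in product([True, False], repeat=2):
    lhs = ~r.and_(a, b)
    rhs = r.or_(~r.expr(a), ~r.expr(b))
    assert (lhs == rhs).run(conn) is True
conn.close()
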
|
||||
|
||||
# Test multiple arguments to 'and' and 'or'
|
||||
- cd: r(true).and(true, true, true, true)
|
||||
py: r.and_(True, True, True, True, True)
|
||||
ot: true
|
||||
- cd: r(true).and(true, true, false, true)
|
||||
py: r.and_(True, True, True, False, True)
|
||||
ot: false
|
||||
- cd: r(true).and(false, true, false, true)
|
||||
py: r.and_(True, False, True, False, True)
|
||||
ot: false
|
||||
- cd: r(false).or(false, false, false, false)
|
||||
py: r.or_(False, False, False, False, False)
|
||||
ot: false
|
||||
- cd: r(false).or(false, false, true, false)
|
||||
py: r.or_(False, False, False, True, False)
|
||||
ot: true
|
||||
- cd: r(false).or(true, false, true, false)
|
||||
py: r.or_(False, True, False, True, False)
|
||||
ot: true
|
||||
|
||||
# Test that precedence errors are detected
|
||||
- js: r.expr(r.expr('a')('b')).default(2)
|
||||
py: r.expr(r.expr('a')['b']).default(2)
|
||||
rb: r(r('a')['b']).default(2)
|
||||
ot: err("ReqlQueryLogicError", "Cannot perform bracket on a non-object non-sequence `\"a\"`.", [])
|
||||
- py: r.expr(r.expr(True) & r.expr(False) == r.expr(False) | r.expr(True))
|
||||
ot: err("ReqlDriverCompileError", "Calling '==' on result of infix bitwise operator:", [])
|
||||
- py: r.expr(r.and_(True, False) == r.or_(False, True))
|
||||
ot: False
|
||||
- rb: r.expr(r.expr(True) & r.expr(False) >= r.expr(False) | r.expr(True))
|
||||
py: r.expr(r.expr(True) & r.expr(False) >= r.expr(False) | r.expr(True))
|
||||
ot: err("ReqlDriverCompileError", "Calling '>=' on result of infix bitwise operator:", [])
|
||||
- cd: r.expr(r.and(True, False) >= r.or(False, True))
|
||||
py: r.expr(r.and_(True, False) >= r.or_(False, True))
|
||||
ot: False
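
# Illustrative sketch, not part of the original test file: per the expected
# errors above, the Python driver flags '==' applied to the result of an
# infix '&'/'|' expression as a likely precedence mistake
# (ReqlDriverCompileError); the explicit function form is unambiguous.
# Assumes a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

# (True and False) == (False or True)  ->  False
assert (r.and_(True, False) == r.or_(False, True)).run(conn) is False
conn.close()
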
|
||||
|
||||
# Non-boolean arguments (and/or/not use truthiness here, so these do not raise type errors)
|
||||
- py: r.expr(1) & True
|
||||
cd: r(1).and(true)
|
||||
ot: true
|
||||
|
||||
- py: r.expr(False) | 'str'
|
||||
cd: r(false).or('str')
|
||||
ot: ("str")
|
||||
|
||||
- py: ~r.expr(1)
|
||||
cd: r(1).not()
|
||||
ot: false
|
||||
|
||||
- py: ~r.expr(null)
|
||||
cd: r(null).not()
|
||||
ot: true
|
@ -1,11 +0,0 @@
|
||||
desc: Tests of nested arithmetic expressions
|
||||
tests:
|
||||
|
||||
- py: (((4 + 2 * (r.expr(26) % 18)) / 5) - 3)
|
||||
js: r(4).add(r(2).mul(r(26).mod(18))).div(5).sub(3)
|
||||
rb:
|
||||
- ((((r 4) + (r 2) * ((r 26) % 18)) / 5) -3)
|
||||
- (((4 + 2 * ((r 26) % 18)) / 5) -3)
|
||||
ot: 1
|
||||
|
||||
# Precedence set by host language
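
# Illustrative sketch, not part of the original test file: the same nested
# expression written with operator overloads and with explicit method
# chaining; the host language's precedence rules shape the operator version.
# 26 % 18 = 8, 2*8 = 16, (4+16)/5 = 4, 4-3 = 1. Assumes a local server on
# the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

operators = (((4 + 2 * (r.expr(26) % 18)) / 5) - 3)
chained = r.expr(4).add(r.expr(2).mul(r.expr(26).mod(18))).div(5).sub(3)
assert operators.run(conn) == 1
assert chained.run(conn) == 1
conn.close()
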
|
@ -1,34 +0,0 @@
|
||||
desc: Tests for the basic usage of the mod operation
|
||||
tests:
|
||||
|
||||
- cd: r.expr(10).mod(3)
|
||||
py:
|
||||
- r.expr(10) % 3
|
||||
- 10 % r.expr(3)
|
||||
- r.expr(10).mod(3)
|
||||
rb:
|
||||
- (r 10) % 3
|
||||
- r(10).mod 3
|
||||
- 10 % (r 3)
|
||||
ot: 1
|
||||
|
||||
- cd: r.expr(-10).mod(-3)
|
||||
py: r.expr(-10) % -3
|
||||
rb: (r -10) % -3
|
||||
ot: -1
|
||||
|
||||
# Type errors
|
||||
- cd: r.expr(4).mod('a')
|
||||
py: r.expr(4) % 'a'
|
||||
rb: r(4) % 'a'
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [1])
|
||||
|
||||
- cd: r.expr('a').mod(1)
|
||||
py: r.expr('a') % 1
|
||||
rb: r('a') % 1
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [0])
|
||||
|
||||
- cd: r.expr('a').mod('b')
|
||||
py: r.expr('a') % 'b'
|
||||
rb: r('a') % 'b'
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [0])
|
@ -1,60 +0,0 @@
|
||||
desc: Tests for the basic usage of the multiplication operation
|
||||
tests:
|
||||
|
||||
- cd: r.expr(1).mul(2)
|
||||
py:
|
||||
- r.expr(1) * 2
|
||||
- 1 * r.expr(2)
|
||||
- r.expr(1).mul(2)
|
||||
rb:
|
||||
- (r 1) * 2
|
||||
- r(1).mul(2)
|
||||
- 1 * (r 2)
|
||||
ot: 2
|
||||
|
||||
- py: r.expr(-1) * -1
|
||||
js: r(-1).mul(-1)
|
||||
rb: (r -1) * -1
|
||||
ot: 1
|
||||
|
||||
- cd: r.expr(1.5).mul(4.5)
|
||||
py: r.expr(1.5) * 4.5
|
||||
rb: (r 1.5) * 4.5
|
||||
ot: 6.75
|
||||
|
||||
- py: r.expr([1,2,3]) * 3
|
||||
js: r([1,2,3]).mul(3)
|
||||
rb: (r [1,2,3]) * 3
|
||||
ot: [1,2,3,1,2,3,1,2,3]
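
# Illustrative sketch, not part of the original test file: multiplying an
# array by an integer repeats it, much like Python's list * int (a
# non-integer count is rejected by the server, as the type-error test below
# shows). Assumes a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

assert (r.expr([1, 2, 3]) * 3).run(conn) == [1, 2, 3] * 3
conn.close()
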
|
||||
|
||||
- cd: r.expr(1).mul(2,3,4,5)
|
||||
ot: 120
|
||||
|
||||
- cd: r(2).mul([1,2,3], 2)
|
||||
py: # this form does not work in Python
|
||||
ot: [1,2,3,1,2,3,1,2,3,1,2,3]
|
||||
|
||||
- cd: r([1,2,3]).mul(2, 2)
|
||||
py: # this form does not work in Python
|
||||
ot: [1,2,3,1,2,3,1,2,3,1,2,3]
|
||||
|
||||
- cd: r(2).mul(2, [1,2,3])
|
||||
py: # this form does not work in Python
|
||||
ot: [1,2,3,1,2,3,1,2,3,1,2,3]
|
||||
|
||||
# Type errors
|
||||
- py: r.expr('a') * 0.8
|
||||
cd: r('a').mul(0.8)
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [0])
|
||||
|
||||
- py: r.expr(1) * 'a'
|
||||
cd: r(1).mul('a')
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [1])
|
||||
|
||||
- py: r.expr('b') * 'a'
|
||||
cd: r('b').mul('a')
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [0])
|
||||
|
||||
- py: r.expr([]) * 1.5
|
||||
cd: r([]).mul(1.5)
|
||||
ot: err('ReqlQueryLogicError', 'Number not an integer: 1.5', [0])
|
@ -1,37 +0,0 @@
|
||||
desc: Tests for basic usage of the subtraction operation
|
||||
tests:
|
||||
|
||||
- cd: r.expr(1).sub(1)
|
||||
py:
|
||||
- r.expr(1) - 1
|
||||
- 1 - r.expr(1)
|
||||
- r.expr(1).sub(1)
|
||||
rb:
|
||||
- (r 1) - 1
|
||||
- 1 - (r 1)
|
||||
- r(1).sub(1)
|
||||
- r.expr(1).sub(1)
|
||||
ot: 0
|
||||
|
||||
- cd: r.expr(-1).sub(1)
|
||||
py: r.expr(-1) - 1
|
||||
rb: (r -1) - 1
|
||||
ot: -2
|
||||
|
||||
- cd: r.expr(1.75).sub(8.5)
|
||||
py: r.expr(1.75) - 8.5
|
||||
rb: (r 1.75) - 8.5
|
||||
ot: -6.75
|
||||
|
||||
- cd: r.expr(1).sub(2,3,4,5)
|
||||
ot: -13
|
||||
|
||||
# Type errors
|
||||
- cd: r.expr('a').sub(0.8)
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [0])
|
||||
|
||||
- cd: r.expr(1).sub('a')
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [1])
|
||||
|
||||
- cd: r.expr('b').sub('a')
|
||||
ot: err('ReqlQueryLogicError', 'Expected type NUMBER but found STRING.', [0])
|
@ -1,14 +0,0 @@
|
||||
desc: Tests meta operations in composite queries
|
||||
tests:
|
||||
|
||||
- py: r.expr([1,2,3]).for_each(r.db_create('db_' + r.row.coerce_to('string')))
|
||||
ot: ({'dbs_created':3,'config_changes':arrlen(3)})
|
||||
|
||||
- py: |
|
||||
r.db_list().set_difference(["rethinkdb", "test"]).for_each(lambda db_name:
|
||||
r.expr([1,2,3]).for_each(lambda i:
|
||||
r.db(db_name).table_create('tbl_' + i.coerce_to('string'))))
|
||||
ot: partial({'tables_created':9})
|
||||
|
||||
- py: r.db_list().set_difference(["rethinkdb", "test"]).for_each(r.db_drop(r.row))
|
||||
ot: partial({'dbs_dropped':3,'tables_dropped':9})
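
# Illustrative sketch, not part of the original test file: the for_each +
# r.row pattern above, used to create one database per element and then drop
# them again. The 'example_db_' prefix is illustrative; assumes a local
# server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

print(r.expr([1, 2, 3])
       .for_each(r.db_create('example_db_' + r.row.coerce_to('string')))
       .run(conn))                  # -> {'dbs_created': 3, ...}
print(r.expr([1, 2, 3])
       .for_each(r.db_drop('example_db_' + r.row.coerce_to('string')))
       .run(conn))                  # -> {'dbs_dropped': 3, ...}
conn.close()
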
|
@ -1,51 +0,0 @@
|
||||
desc: Tests meta queries for databases
|
||||
tests:
|
||||
|
||||
# We should always start out with a 'test' database and the special 'rethinkdb'
|
||||
# database
|
||||
- cd: r.db_list()
|
||||
ot: bag(['rethinkdb', 'test'])
|
||||
|
||||
## DB create
|
||||
|
||||
- cd: r.db_create('a')
|
||||
ot: partial({'dbs_created':1})
|
||||
- cd: r.db_create('b')
|
||||
ot: partial({'dbs_created':1})
|
||||
|
||||
## DB list
|
||||
|
||||
- cd: r.db_list()
|
||||
ot: bag(['rethinkdb', 'a', 'b', 'test'])
|
||||
|
||||
## DB config
|
||||
|
||||
- cd: r.db('a').config()
|
||||
ot: {'name':'a','id':uuid()}
|
||||
|
||||
## DB drop
|
||||
|
||||
- cd: r.db_drop('b')
|
||||
ot: partial({'dbs_dropped':1})
|
||||
|
||||
- cd: r.db_list()
|
||||
ot: bag(['rethinkdb', 'a', 'test'])
|
||||
|
||||
- cd: r.db_drop('a')
|
||||
ot: partial({'dbs_dropped':1})
|
||||
|
||||
- cd: r.db_list()
|
||||
ot: bag(['rethinkdb', 'test'])
|
||||
|
||||
## DB errors
|
||||
- cd: r.db_create('bar')
|
||||
ot: partial({'dbs_created':1})
|
||||
|
||||
- cd: r.db_create('bar')
|
||||
ot: err('ReqlOpFailedError', 'Database `bar` already exists.', [0])
|
||||
|
||||
- cd: r.db_drop('bar')
|
||||
ot: partial({'dbs_dropped':1})
|
||||
|
||||
- cd: r.db_drop('bar')
|
||||
ot: err('ReqlOpFailedError', 'Database `bar` does not exist.', [0])
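
# Illustrative sketch, not part of the original test file: creating a
# database that already exists raises ReqlOpFailedError (as tested above),
# so a "create if missing" helper just swallows that specific error. The
# database name is illustrative; assumes a local server on the default port.
from rethinkdb import RethinkDB
from rethinkdb.errors import ReqlOpFailedError

r = RethinkDB()
conn = r.connect()  # assumed local server, default port

def ensure_db(name):
    try:
        return r.db_create(name).run(conn)
    except ReqlOpFailedError:
        return None  # already there

ensure_db('example_db')   # created
ensure_db('example_db')   # second call is a no-op
r.db_drop('example_db').run(conn)
conn.close()
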
|
@ -1,365 +0,0 @@
|
||||
desc: Tests meta queries for creating and deleting tables
|
||||
tests:
|
||||
|
||||
- def: db = r.db('test')
|
||||
|
||||
- cd: db.table_list()
|
||||
ot: []
|
||||
|
||||
- cd: r.db('rethinkdb').info()
|
||||
ot: ({'type':'DB','name':'rethinkdb','id':null})
|
||||
|
||||
- cd: r.db('rethinkdb').table('stats').info()
|
||||
ot: partial({'db':{'type':'DB','name':'rethinkdb','id':null},
|
||||
'type':'TABLE','id':null,'name':'stats',
|
||||
'indexes':[],'primary_key':'id'})
|
||||
|
||||
# Table create
|
||||
- cd: db.table_create('a')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: db.table_list()
|
||||
ot: ['a']
|
||||
|
||||
- cd: db.table_create('b')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: db.table_list()
|
||||
ot: bag(['a', 'b'])
|
||||
|
||||
# Table drop
|
||||
- cd: db.table_drop('a')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- cd: db.table_list()
|
||||
ot: ['b']
|
||||
|
||||
- cd: db.table_drop('b')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- cd: db.table_list()
|
||||
ot: []
|
||||
|
||||
# Table create options
|
||||
- py: db.table_create('ab', durability='soft')
|
||||
js: db.table_create('ab', {durability:'soft'})
|
||||
rb: db.table_create('ab', :durability => 'soft')
|
||||
ot: partial({'tables_created':1,'config_changes':[partial({'new_val':partial({'durability':'soft'})})]})
|
||||
|
||||
- cd: db.table_drop('ab')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- py: db.table_create('ab', durability='hard')
|
||||
js: db.table_create('ab', {durability:'hard'})
|
||||
rb: db.table_create('ab', :durability => 'hard')
|
||||
ot: partial({'tables_created':1,'config_changes':[partial({'new_val':partial({'durability':'hard'})})]})
|
||||
|
||||
- cd: db.table_drop('ab')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- py: db.table_create('ab', durability='fake')
|
||||
js: db.table_create('ab', {durability:'fake'})
|
||||
rb: db.table_create('ab', :durability => 'fake')
|
||||
ot: err('ReqlQueryLogicError', 'Durability option `fake` unrecognized (options are "hard" and "soft").')
|
||||
|
||||
- py: db.table_create('ab', primary_key='bar', shards=2, replicas=1)
|
||||
js: db.tableCreate('ab', {primary_key:'bar', shards:2, replicas:1})
|
||||
rb: db.table_create('ab', {:primary_key => 'bar', :shards => 1, :replicas => 1})
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: db.table_drop('ab')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- py: db.table_create('ab', primary_key='bar', primary_replica_tag='default')
|
||||
js: db.tableCreate('ab', {primary_key:'bar', primaryReplicaTag:'default'})
|
||||
rb: db.table_create('ab', {:primary_key => 'bar', :primary_replica_tag => 'default'})
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: db.table_drop('ab')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- py: db.table_create('ab', nonvoting_replica_tags=['default'])
|
||||
js: db.tableCreate('ab', {nonvotingReplicaTags:['default']})
|
||||
rb: db.table_create('ab', {:nonvoting_replica_tags => ['default']})
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: db.table_drop('ab')
|
||||
ot: partial({'tables_dropped':1})
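
# Illustrative sketch, not part of the original test file: table_create with
# the optional arguments exercised above (custom primary key, write
# durability, initial shard/replica layout). The table name is illustrative;
# assumes a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port
db = r.db('test')

db.table_create('example_tbl',
                primary_key='bar',
                durability='soft',
                shards=2,
                replicas=1).run(conn)
print(db.table('example_tbl').config().run(conn))  # shows the chosen options
db.table_drop('example_tbl').run(conn)
conn.close()
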
|
||||
|
||||
# Table reconfigure
|
||||
- cd: db.table_create('a')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas=1)
|
||||
js: db.table('a').reconfigure({shards:1, replicas:1})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => 1)
|
||||
ot: partial({'reconfigured':1})
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas={"default":1}, nonvoting_replica_tags=['default'], primary_replica_tag='default')
|
||||
js: db.table('a').reconfigure({shards:1, replicas:{default:1}, nonvoting_replica_tags:['default'], primary_replica_tag:'default'})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => {:default => 1}, :nonvoting_replica_tags => ['default'], :primary_replica_tag => 'default')
|
||||
ot: partial({'reconfigured':1})
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas=1, dry_run=True)
|
||||
js: db.table('a').reconfigure({shards:1, replicas:1, dry_run:true})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => 1, :dry_run => true)
|
||||
ot: partial({'reconfigured':0})
|
||||
|
||||
- py: db.table('a').reconfigure(emergency_repair="unsafe_rollback")
|
||||
js: db.table('a').reconfigure({emergency_repair:"unsafe_rollback"})
|
||||
rb: db.table('a').reconfigure(:emergency_repair => "unsafe_rollback")
|
||||
ot: err('ReqlOpFailedError', 'This table doesn\'t need to be repaired.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(emergency_repair="unsafe_rollback", dry_run=True)
|
||||
js: db.table('a').reconfigure({emergency_repair:"unsafe_rollback", dry_run:true})
|
||||
rb: db.table('a').reconfigure(:emergency_repair => "unsafe_rollback", :dry_run => true)
|
||||
ot: err('ReqlOpFailedError', 'This table doesn\'t need to be repaired.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(emergency_repair="unsafe_rollback_or_erase")
|
||||
js: db.table('a').reconfigure({emergency_repair:"unsafe_rollback_or_erase"})
|
||||
rb: db.table('a').reconfigure(:emergency_repair => "unsafe_rollback_or_erase")
|
||||
ot: err('ReqlOpFailedError', 'This table doesn\'t need to be repaired.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(emergency_repair=None, shards=1, replicas=1, dry_run=True)
|
||||
js: db.table('a').reconfigure({emergency_repair:null, shards:1, replicas:1, dry_run:true})
|
||||
rb: db.table('a').reconfigure(:emergency_repair => null, :shards => 1, :replicas => 1, :dry_run => true)
|
||||
ot: partial({'reconfigured':0})
|
||||
|
||||
- cd: db.table_drop('a')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
# Table errors
|
||||
- cd: db.table_create('foo')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: db.table_create('foo')
|
||||
ot: err('ReqlOpFailedError', 'Table `test.foo` already exists.', [0])
|
||||
|
||||
- cd: db.table_drop('foo')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- cd: db.table_drop('foo')
|
||||
ot: err('ReqlOpFailedError', 'Table `test.foo` does not exist.', [0])
|
||||
|
||||
- cd: db.table_create('nonsense', 'foo')
|
||||
ot:
|
||||
js: err('ReqlCompileError', 'Expected 1 argument (not including options) but found 2.', [])
|
||||
rb: err("ReqlCompileError", "Expected between 1 and 2 arguments but found 3.", [])
|
||||
py: err("ReqlCompileError", "Expected between 1 and 2 arguments but found 3.", [])
|
||||
|
||||
- js: db.table_create('nonsense', {'foo':'bar'})
|
||||
py: db.table_create('nonsense', foo='bar')
|
||||
rb: db.table_create('nonsense', :foo => 'bar')
|
||||
ot: err('ReqlCompileError', "Unrecognized optional argument `foo`.", [])
|
||||
|
||||
# RSI(reql_admin): Add tests for table_create() with configuration parameters
|
||||
|
||||
# Table reconfigure errors
|
||||
- cd: db.table_create('a')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- py: db.table('a').reconfigure(shards=0, replicas=1)
|
||||
js: db.table('a').reconfigure({shards:0, replicas:1})
|
||||
rb: db.table('a').reconfigure(:shards => 0, :replicas => 1)
|
||||
ot: err('ReqlQueryLogicError', 'Every table must have at least one shard.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas={"default":1}, primary_replica_tag="foo")
|
||||
js: db.table('a').reconfigure({shards:1, replicas:{default:1}, primary_replica_tag:"foo"})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => {:default => 1}, :primary_replica_tag => "foo")
|
||||
ot: err('ReqlOpFailedError', 'Can\'t use server tag `foo` for primary replicas because you specified no replicas in server tag `foo`.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas={"default":1}, primary_replica_tag="default", nonvoting_replica_tags=["foo"])
|
||||
js: db.table('a').reconfigure({shards:1, replicas:{"default":1}, primary_replica_tag:"default", nonvoting_replica_tags:["foo"]})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => {:default => 1}, :primary_replica_tag => "default", :nonvoting_replica_tags => ["foo"])
|
||||
ot: err('ReqlOpFailedError', 'You specified that the replicas in server tag `foo` should be non-voting, but you didn\'t specify a number of replicas in server tag `foo`.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas={"foo":0}, primary_replica_tag="foo")
|
||||
js: db.table('a').reconfigure({shards:1, replicas:{foo:0}, primary_replica_tag:"foo"})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => {:foo => 0}, :primary_replica_tag => "foo")
|
||||
ot: err('ReqlOpFailedError', 'You must set `replicas` to at least one. `replicas` includes the primary replica; if there are zero replicas, there is nowhere to put the data.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas={"default":0})
|
||||
js: db.table('a').reconfigure({shards:1, replicas:{default:0}})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => {:default => 0})
|
||||
ot: err('ReqlQueryLogicError', '`primary_replica_tag` must be specified when `replicas` is an OBJECT.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas={"default":-3}, primary_replica_tag='default')
|
||||
js: db.table('a').reconfigure({shards:1, replicas:{default:-3}, primary_replica_tag:'default'})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => {:default => -3}, :primary_replica_tag => 'default')
|
||||
ot: err('ReqlQueryLogicError', 'Can\'t have a negative number of replicas', [])
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas=3, primary_replica_tag='foo')
|
||||
js: db.table('a').reconfigure({shards:1, replicas:3, primary_replica_tag:'foo'})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => 3, :primary_replica_tag => 'foo')
|
||||
ot: err('ReqlQueryLogicError', '`replicas` must be an OBJECT if `primary_replica_tag` is specified.', [])
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas=3, nonvoting_replica_tags=['foo'])
|
||||
js: db.table('a').reconfigure({shards:1, replicas:3, nonvoting_replica_tags:['foo']})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => 3, :nonvoting_replica_tags => ['foo'])
|
||||
ot: err('ReqlQueryLogicError', '`replicas` must be an OBJECT if `nonvoting_replica_tags` is specified.', [])
|
||||
|
||||
- py: db.reconfigure(emergency_repair="unsafe_rollback")
|
||||
js: db.reconfigure({emergency_repair:"unsafe_rollback"})
|
||||
rb: db.reconfigure(:emergency_repair => "unsafe_rollback")
|
||||
ot: err('ReqlQueryLogicError', 'Can\'t emergency repair an entire database at once; instead you should run `reconfigure()` on each table individually.')
|
||||
|
||||
- py: db.table('a').reconfigure(emergency_repair="foo")
|
||||
js: db.table('a').reconfigure({emergency_repair:"foo"})
|
||||
rb: db.table('a').reconfigure(:emergency_repair => "foo")
|
||||
ot: err('ReqlQueryLogicError', '`emergency_repair` should be "unsafe_rollback" or "unsafe_rollback_or_erase"', [])
|
||||
|
||||
- py: db.table('a').reconfigure(emergency_repair="unsafe_rollback", shards=1, replicas=1)
|
||||
js: db.table('a').reconfigure({emergency_repair:"unsafe_rollback", shards:1, replicas:1})
|
||||
rb: db.table('a').reconfigure(:emergency_repair => "unsafe_rollback", :shards => 1, :replicas => 1)
|
||||
ot: err('ReqlQueryLogicError', 'In emergency repair mode, you can\'t specify shards, replicas, etc.')
|
||||
|
||||
# Test reconfigure auto-sharding without data
|
||||
- py: db.table('a').reconfigure(shards=2, replicas=1)
|
||||
js: db.table('a').reconfigure({shards:2, replicas:1})
|
||||
rb: db.table('a').reconfigure(:shards => 2, :replicas => 1)
|
||||
ot: partial({'reconfigured':1})
|
||||
|
||||
- py: db.table('a').wait(wait_for="all_replicas_ready")
|
||||
js: db.table('a').wait({"waitFor":"all_replicas_ready"})
|
||||
rb: db.table('a').wait(:wait_for=>"all_replicas_ready")
|
||||
ot: {"ready":1}
|
||||
|
||||
# Insert some data so that `reconfigure()` can pick shard points
|
||||
- py: db.table('a').insert([{"id":1}, {"id":2}, {"id":3}, {"id":4}])
|
||||
js: db.table('a').insert([{id:1}, {id:2}, {id:3}, {id:4}])
|
||||
rb: db.table('a').insert([{"id" => 1}, {"id" => 2}, {"id" => 3}, {"id" => 4}])
|
||||
ot: partial({"inserted":4})
|
||||
|
||||
- py: db.table('a').reconfigure(shards=2, replicas=1)
|
||||
js: db.table('a').reconfigure({shards:2, replicas:1})
|
||||
rb: db.table('a').reconfigure(:shards => 2, :replicas => 1)
|
||||
ot: partial({'reconfigured':1})
|
||||
|
||||
- py: db.table('a').reconfigure(shards=1, replicas=2)
|
||||
js: db.table('a').reconfigure({shards:1, replicas:2})
|
||||
rb: db.table('a').reconfigure(:shards => 1, :replicas => 2)
|
||||
ot: err('ReqlOpFailedError', 'Can\'t put 2 replicas on servers with the tag `default` because there are only 1 servers with the tag `default`. It\'s impossible to have more replicas of the data than there are servers.', [])
|
||||
|
||||
# Test wait and rebalance
|
||||
- py: db.table('a').wait(wait_for="all_replicas_ready")
|
||||
js: db.table('a').wait({"waitFor":"all_replicas_ready"})
|
||||
rb: db.table('a').wait(:wait_for=>"all_replicas_ready")
|
||||
ot: {"ready":1}
|
||||
- cd: db.table('a').rebalance()
|
||||
ot: partial({'rebalanced':1})
|
||||
|
||||
- py: db.wait(wait_for="all_replicas_ready")
|
||||
js: db.wait({"waitFor":"all_replicas_ready"})
|
||||
rb: db.wait(:wait_for=>"all_replicas_ready")
|
||||
ot: {"ready":1}
|
||||
- cd: db.rebalance()
|
||||
ot: partial({'rebalanced':1})
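
# Illustrative sketch, not part of the original test file: the
# reconfigure / wait / rebalance sequence tested above on a throwaway table.
# The table name is illustrative; assumes a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port
db = r.db('test')

db.table_create('example_tbl').run(conn)
# Re-shard, then block until every replica reports ready, then rebalance.
print(db.table('example_tbl').reconfigure(shards=2, replicas=1).run(conn))
print(db.table('example_tbl').wait(wait_for='all_replicas_ready').run(conn))
print(db.table('example_tbl').rebalance().run(conn))
db.table_drop('example_tbl').run(conn)
conn.close()
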
|
||||
|
||||
- cd: r.wait()
|
||||
ot:
|
||||
py: err('AttributeError', "'module' object has no attribute 'wait'", [])
|
||||
# different sub-versions of node have different messages #5617
|
||||
js: err('TypeError')
|
||||
rb: err('ReqlQueryLogicError', '`wait` can only be called on a table or database.', [])
|
||||
- cd: r.rebalance()
|
||||
ot:
|
||||
py: err('AttributeError', "'module' object has no attribute 'rebalance'", [])
|
||||
# different sub-versions of node have different messages #5617
|
||||
js: err('TypeError')
|
||||
rb: err('ReqlQueryLogicError', '`rebalance` can only be called on a table or database.', [])
|
||||
|
||||
- cd: db.table_drop('a')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
# Reconfiguring all tables in a database
|
||||
- cd: db.table_create('a')
|
||||
- cd: db.table_create('b')
|
||||
- cd: db.table_create('c')
|
||||
|
||||
- py: db.reconfigure(shards=0, replicas=1)
|
||||
js: db.reconfigure({shards:0, replicas:1})
|
||||
rb: db.reconfigure(:shards => 0, :replicas => 1)
|
||||
ot: err('ReqlQueryLogicError', 'Every table must have at least one shard.', [])
|
||||
|
||||
- py: db.reconfigure(shards=1, replicas={"default":0})
|
||||
js: db.reconfigure({shards:1, replicas:{default:0}})
|
||||
rb: db.reconfigure(:shards => 1, :replicas => {:default => 0})
|
||||
ot: err('ReqlQueryLogicError', '`primary_replica_tag` must be specified when `replicas` is an OBJECT.', [])
|
||||
|
||||
- py: db.reconfigure(shards=1, replicas={"default":-3}, primary_replica_tag='default')
|
||||
js: db.reconfigure({shards:1, replicas:{default:-3}, primary_replica_tag:'default'})
|
||||
rb: db.reconfigure(:shards => 1, :replicas => {:default => -3}, :primary_replica_tag => 'default')
|
||||
ot: err('ReqlQueryLogicError', 'Can\'t have a negative number of replicas', [])
|
||||
|
||||
- py: db.reconfigure(shards=1, replicas=3, primary_replica_tag='foo')
|
||||
js: db.reconfigure({shards:1, replicas:3, primary_replica_tag:'foo'})
|
||||
rb: db.reconfigure(:shards => 1, :replicas => 3, :primary_replica_tag => 'foo')
|
||||
ot: err('ReqlQueryLogicError', '`replicas` must be an OBJECT if `primary_replica_tag` is specified.', [])
|
||||
|
||||
- py: db.reconfigure(shards=2, replicas=1)
|
||||
js: db.reconfigure({shards:2, replicas:1})
|
||||
rb: db.reconfigure(:shards => 2, :replicas => 1)
|
||||
ot: partial({'reconfigured':3})
|
||||
|
||||
- cd: db.table_drop('a')
|
||||
ot: partial({'tables_dropped':1})
|
||||
- cd: db.table_drop('b')
|
||||
ot: partial({'tables_dropped':1})
|
||||
- cd: db.table_drop('c')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
# table_config and table_status porcelains
|
||||
- cd: r.db_create("test2")
|
||||
ot: partial({'dbs_created':1})
|
||||
|
||||
- def: db2 = r.db("test2")
|
||||
|
||||
- cd: db.table_create("testA")
|
||||
ot: partial({'tables_created':1})
|
||||
- cd: db.table_create("testB")
|
||||
ot: partial({'tables_created':1})
|
||||
- cd: db2.table_create("test2B")
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- cd: r.table('testA').config().pluck('db','name')
|
||||
ot: {'db':'test','name':'testA'}
|
||||
|
||||
- cd: r.table('doesntexist').config()
|
||||
ot: err('ReqlOpFailedError', 'Table `test.doesntexist` does not exist.', [])
|
||||
|
||||
- cd: r.table('test2B').config()
|
||||
ot: err('ReqlOpFailedError', 'Table `test.test2B` does not exist.', [])
|
||||
|
||||
- cd: r.db('rethinkdb').table('table_config').filter({'name':'testA'}).nth(0).eq(r.table('testA').config())
|
||||
ot: True
|
||||
|
||||
- cd: r.db('rethinkdb').table('table_status').filter({'name':'testA'}).nth(0).eq(r.table('testA').status())
|
||||
ot: True
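
# Illustrative sketch, not part of the original test file: table.config() is
# just a filtered view of the rows in the special `rethinkdb` database, as
# the equality checks above assert. Assumes the 'testA' table created by the
# tests above still exists and a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect(db='test')  # assumed local server, default port

same = (r.db('rethinkdb').table('table_config')
         .filter({'name': 'testA'}).nth(0)
         .eq(r.table('testA').config())
         .run(conn))
assert same is True
conn.close()
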
|
||||
|
||||
- py: r.db('rethinkdb').table('table_config', identifier_format='uuid').nth(0)["db"]
|
||||
js: r.db('rethinkdb').table('table_config', {identifierFormat:'uuid'}).nth(0)("db")
|
||||
rb: r.db('rethinkdb').table('table_config', {:identifier_format=>'uuid'}).nth(0)["db"]
|
||||
ot: uuid()
|
||||
|
||||
- py: r.table('testA', identifier_format='uuid').count()
|
||||
js: r.table('testA', {identifierFormat:'uuid'}).count()
|
||||
rb: r.table('testA', {:identifier_format=>'uuid'}).count()
|
||||
ot: 0
|
||||
|
||||
- py: r.wait(wait_for='all_replicas_ready', timeout=5)
|
||||
js: r.wait({waitFor:'all_replicas_ready', timeout:5})
|
||||
rb: r.wait(:wait_for=>'all_replicas_ready', :timeout => 5)
|
||||
ot:
|
||||
py: err('AttributeError', "'module' object has no attribute 'wait'", [])
|
||||
# different sub-versions of node have different messages #5617
|
||||
js: err('TypeError')
|
||||
rb: err('ReqlQueryLogicError', '`wait` can only be called on a table or database.', [])
|
||||
|
||||
- cd: db.table_drop('testA')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- cd: db.table_drop('testB')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
- cd: r.db_drop('test2')
|
||||
ot: partial({'dbs_dropped':1,'tables_dropped':1})
|
@ -1,93 +0,0 @@
|
||||
desc: Tests replacement of selections
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# old version of argument
|
||||
- cd: tbl.insert({'id':0}, :return_vals => true).pluck('changes', 'first_error')
|
||||
py: tbl.insert({'id':0}, return_vals=True).pluck('changes', 'first_error')
|
||||
js: tbl.insert({'id':0}, {'return_vals':true}).pluck('changes', 'first_error')
|
||||
ot: err("ReqlQueryLogicError", "Error:"+" encountered obsolete optarg `return_vals`. Use `return_changes` instead.", [0])
|
||||
|
||||
- cd: tbl.insert({'id':0}, :return_changes => true).pluck('changes', 'first_error')
|
||||
py: tbl.insert({'id':0}, return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.insert({'id':0}, {'return_changes':true}).pluck('changes', 'first_error')
|
||||
ot: ({'changes':[{'old_val':null,'new_val':{'id':0}}]})
|
||||
- cd: tbl.insert({'id':0}, :return_changes => true).pluck('changes', 'first_error')
|
||||
py: tbl.insert({'id':0}, return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.insert({'id':0}, {'return_changes':true}).pluck('changes', 'first_error')
|
||||
ot: ({'changes':[], 'first_error':"Duplicate primary key `id`:\n{\n\t\"id\":\t0\n}\n{\n\t\"id\":\t0\n}"})
|
||||
- cd: tbl.insert({'id':0}, return_changes:'always').pluck('changes', 'first_error')
|
||||
py: tbl.insert({'id':0}, return_changes='always').pluck('changes', 'first_error')
|
||||
js: tbl.insert({'id':0}, {'return_changes':'always'}).pluck('changes', 'first_error')
|
||||
ot: ({'first_error':"Duplicate primary key `id`:\n{\n\t\"id\":\t0\n}\n{\n\t\"id\":\t0\n}",'changes':[{'old_val':{'id':0},'new_val':{'id':0},'error':"Duplicate primary key `id`:\n{\n\t\"id\":\t0\n}\n{\n\t\"id\":\t0\n}"}]})
|
||||
- cd: tbl.insert([{'id':1}], :return_changes => true)
|
||||
py: tbl.insert([{'id':1}], return_changes=True)
|
||||
js: tbl.insert([{'id':1}], {'return_changes':true})
|
||||
ot: ({'changes':[{'new_val':{'id':1},'old_val':null}], 'errors':0, 'deleted':0, 'unchanged':0, 'skipped':0, 'replaced':0, 'inserted':1})
|
||||
- cd: tbl.insert([{'id':0}], :return_changes => true).pluck('changes', 'first_error')
|
||||
py: tbl.insert([{'id':0}], return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.insert([{'id':0}], {'return_changes':true}).pluck('changes', 'first_error')
|
||||
ot: ({'changes':[],'first_error':"Duplicate primary key `id`:\n{\n\t\"id\":\t0\n}\n{\n\t\"id\":\t0\n}"})
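
# Illustrative sketch, not part of the original test file: reading the
# changes feed returned by insert with return_changes. Assumes an existing,
# initially empty table (the name is illustrative) and a local server on the
# default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port
tbl = r.db('test').table('example_tbl')   # illustrative table name

# return_changes=True attaches an old_val/new_val pair per written row;
# 'always' also reports rows whose write failed or was a no-op.
res = tbl.insert({'id': 0}, return_changes=True).run(conn)
print(res['changes'])            # [{'old_val': None, 'new_val': {'id': 0}}]
res = tbl.insert({'id': 0}, return_changes='always').run(conn)
print(res['first_error'])        # duplicate primary key
print(res['changes'][0]['error'])
conn.close()
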
|
||||
|
||||
- cd: tbl.get(0).update({'x':1}, :return_changes => true).pluck('changes', 'first_error')
|
||||
py: tbl.get(0).update({'x':1}, return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.get(0).update({'x':1}, {'return_changes':true}).pluck('changes', 'first_error')
|
||||
ot: ({'changes':[{'old_val':{'id':0},'new_val':{'id':0,'x':1}}]})
|
||||
- cd: tbl.get(0).update({'x':r.error("a")}, :return_changes => true).pluck('changes', 'first_error')
|
||||
py: tbl.get(0).update({'x':r.error("a")}, return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.get(0).update({'x':r.error("a")}, {'return__changes':true}).pluck('changes', 'first__error')
|
||||
ot: ({'changes':[],'first_error':'a'})
|
||||
- rb: tbl.update({'x':3}, :return_changes => true).pluck('changes', 'first_error').do {|d| d.merge({:changes => d['changes'].order_by {|a| a['old_val']['id']}})}
|
||||
py: tbl.update({'x':3}, return_changes=True).pluck('changes', 'first_error').do(lambda d:d.merge({'changes':d['changes'].order_by(lambda a:a['old_val']['id'])}))
|
||||
js: tbl.update({'x':3}, {'return_changes':true}).pluck('changes', 'first_error').do(function(p){return p.merge({'changes':p('changes').orderBy(function(a){return a('old_val')('id')})})})
|
||||
ot: ({'changes':[{'old_val':{'id':0, 'x':1},'new_val':{'id':0, 'x':3}}, {'old_val':{'id':1},'new_val':{'id':1, 'x':3}}]})
|
||||
|
||||
- cd: tbl.get(0).replace({'id':0,'x':2}, :return_changes => true).pluck('changes', 'first_error')
|
||||
py: tbl.get(0).replace({'id':0,'x':2}, return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.get(0).replace({'id':0,'x':2}, {'return_changes':true}).pluck('changes', 'first_error')
|
||||
ot: ({'changes':[{'old_val':{'id':0,'x':3},'new_val':{'id':0,'x':2}}]})
|
||||
- cd: tbl.get(0).replace(:return_changes => true){{'x':r.error('a')}}.pluck('changes', 'first_error')
|
||||
py: tbl.get(0).replace(lambda y:{'x':r.error('a')}, return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.get(0).replace(function(y){return {'x':r.error('a')}}, {'return_changes':true}).pluck('changes', 'first_error')
|
||||
ot: ({'changes':[],'first_error':'a'})
|
||||
- cd: tbl.get(0).replace(:return_changes => 'always'){{'x':r.error('a')}}.pluck('changes', 'first_error')
|
||||
py: tbl.get(0).replace(lambda y:{'x':r.error('a')}, return_changes='always').pluck('changes', 'first_error')
|
||||
js: tbl.get(0).replace(function(y){return {'x':r.error('a')}}, {'return_changes':'always'}).pluck('changes', 'first_error')
|
||||
ot: ({'first_error':'a','changes':[{'old_val':{'id':0,'x':2},'new_val':{'id':0,'x':2},'error':'a'}]})
|
||||
- rb: tbl.replace( :return_changes => true) { |d| d.without('x')}.pluck('changes', 'first_error').do {|d| d.merge({:changes => d['changes'].order_by {|a| a['old_val']['id']}})}
|
||||
py: tbl.replace(lambda y:y.without('x'), return_changes=True).pluck('changes', 'first_error').do(lambda d:d.merge({'changes':d['changes'].order_by(lambda a:a['old_val']['id'])}))
|
||||
js: tbl.replace(function(p){return p.without('x')}, {'return_changes':true}).pluck('changes', 'first_error').do(function(p){return p.merge({'changes':p('changes').orderBy(function(a){return a('old_val')('id')})})})
|
||||
ot: ({'changes':[{'new_val':{'id':0},'old_val':{'id':0, 'x':2}}, {'new_val':{'id':1},'old_val':{'id':1,'x':3}}]})
|
||||
- rb: tbl.replace({'x':1}, :return_changes => 'always').pluck('changes', 'first_error').do {|d| d.merge({:changes => d['changes'].order_by {|a| a['old_val']['id']}})}
|
||||
py: tbl.replace({'x':1}, return_changes='always').pluck('changes', 'first_error').do(lambda d:d.merge({'changes':d['changes'].order_by(lambda a:a['old_val']['id'])}))
|
||||
js: tbl.replace({'x':1}, {'return_changes':'always'}).pluck('changes', 'first_error').do(function(p){return p.merge({'changes':p('changes').orderBy(function(a){return a('old_val')('id')})})})
|
||||
ot: ({'first_error':"Inserted object must have primary key `id`:\n{\n\t\"x\":\t1\n}", 'changes':[{'new_val':{'id':0},'old_val':{'id':0}, 'error':"Inserted object must have primary key `id`:\n{\n\t\"x\":\t1\n}"}, {'new_val':{'id':1},'old_val':{'id':1},'error':"Inserted object must have primary key `id`:\n{\n\t\"x\":\t1\n}"}]})
|
||||
|
||||
- rb: tbl.foreach{|row| [tbl.get(0).update(null, :return_changes => true), tbl.get(0).update({a:1}, :return_changes => true)]}.pluck('changes', 'first_error').do {|d| d.merge({:changes => d['changes'].order_by {|a| a['old_val']['id']}})}
|
||||
ot: ({'changes':[{"new_val"=>{"a"=>1, "id"=>0}, "old_val"=>{"id"=>0}}]})
|
||||
- rb: tbl.get(0).update({a:r.literal()})['replaced']
|
||||
ot: 1
|
||||
|
||||
- rb: tbl.get(0).update({}).pluck('changes')
|
||||
ot: ({})
|
||||
- rb: tbl.get(0).update({}, return_changes:true).pluck('changes')
|
||||
ot: ({'changes':[]})
|
||||
- rb: tbl.get(0).update({}, return_changes:'always').pluck('changes')
|
||||
ot: ({'changes':[{'new_val':{'id':0}, 'old_val':{'id':0}}]})
|
||||
|
||||
- rb: tbl.get(-1).update({}).pluck('changes')
|
||||
ot: ({})
|
||||
- rb: tbl.get(-1).update({}, return_changes:true).pluck('changes')
|
||||
ot: ({'changes':[]})
|
||||
- rb: tbl.get(-1).update({}, return_changes:'always').pluck('changes')
|
||||
ot: ({'changes':[{'new_val':null, 'old_val':null}]})
|
||||
|
||||
- cd: tbl.get(0).delete(:return_changes => true).pluck('changes', 'first_error')
|
||||
py: tbl.get(0).delete(return_changes=True).pluck('changes', 'first_error')
|
||||
js: tbl.get(0).delete({'return_changes':true}).pluck('changes', 'first_error')
|
||||
ot: ({'changes':[{'old_val':{'id':0},'new_val':null}]})
|
||||
- cd: tbl.delete(:return_changes => true)
|
||||
py: tbl.delete(return_changes=True)
|
||||
js: tbl.delete({'return_changes':true})
|
||||
ot: ({'deleted':1,'errors':0,'inserted':0,'replaced':0,'skipped':0,'unchanged':0,'changes':[{'new_val':null, 'old_val':{'id':1}}]})
|
||||
|
@ -1,50 +0,0 @@
|
||||
desc: Tests deletes of selections
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# Set up some data
|
||||
|
||||
- py: tbl.insert([{'id':i} for i in xrange(100)])
|
||||
js: |
|
||||
tbl.insert(function(){
|
||||
var res = []
|
||||
for (var i = 0; i < 100; i++) {
|
||||
res.push({id: i});
|
||||
}
|
||||
return res;
|
||||
}())
|
||||
rb: tbl.insert((1..100).map{ |i| {"id" => i} })
|
||||
ot: ({'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':100})
|
||||
|
||||
- cd: tbl.count()
|
||||
ot: 100
|
||||
|
||||
# Point delete
|
||||
|
||||
- cd: tbl.get(12).delete()
|
||||
ot: ({'deleted':1,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':0})
|
||||
|
||||
# Attempt deletion with bad durability flag.
|
||||
|
||||
- js: tbl.skip(50).delete({durability:'wrong'})
|
||||
rb: tbl.skip(50).delete({ :durability => 'wrong' })
|
||||
py: tbl.skip(50).delete(durability='wrong')
|
||||
ot: err('ReqlQueryLogicError', 'Durability option `wrong` unrecognized (options are "hard" and "soft").', [0])
|
||||
|
||||
# Delete selection of table, soft durability flag.
|
||||
|
||||
- js: tbl.skip(50).delete({durability:'soft'})
|
||||
rb: tbl.skip(50).delete({ :durability => 'soft' })
|
||||
py: tbl.skip(50).delete(durability='soft')
|
||||
ot: ({'deleted':49,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':0})
|
||||
|
||||
# Delete whole table, hard durability flag.
|
||||
|
||||
- js: tbl.delete({durability:'hard'})
|
||||
rb: tbl.delete({ :durability => 'hard' })
|
||||
py: tbl.delete(durability='hard')
|
||||
ot: ({'deleted':50,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':0})
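
# Illustrative sketch, not part of the original test file: the same flow as
# above (point delete, bulk delete with soft durability, then a full delete
# with hard durability). Assumes an existing, initially empty table (the
# name is illustrative) and a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port
tbl = r.db('test').table('example_tbl')   # illustrative table name

tbl.insert([{'id': i} for i in range(100)]).run(conn)
print(tbl.get(12).delete().run(conn))                     # deleted: 1
print(tbl.skip(50).delete(durability='soft').run(conn))   # deleted: 49
print(tbl.delete(durability='hard').run(conn))            # deleted: 50
conn.close()
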
|
||||
|
||||
# test deletion on a non-deletable object
|
||||
- cd: r.expr([1, 2]).delete()
|
||||
ot: err('ReqlQueryLogicError', 'Expected type SELECTION but found DATUM:', [0])
|
@ -1,239 +0,0 @@
|
||||
desc: Tests insertion into tables
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# Set up our secondary test table
|
||||
- cd: r.db('test').table_create('test2')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- def: tbl2 = r.db('test').table('test2')
|
||||
|
||||
# Single doc insert
|
||||
- cd: tbl.insert({'id':0,'a':0})
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
- cd: tbl.count()
|
||||
ot: 1
|
||||
|
||||
# Hard durability insert
|
||||
- py: tbl.insert({'id':1, 'a':1}, durability='hard')
|
||||
js: tbl.insert({id:1, a:1}, {durability:'hard'})
|
||||
rb: tbl.insert({ :id => 1, :a => 1 }, { :durability => 'hard' })
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
- cd: tbl.count()
|
||||
ot: 2
|
||||
|
||||
# Soft durability insert
|
||||
- py: tbl.insert({'id':2, 'a':2}, durability='soft')
|
||||
js: tbl.insert({id:2, a:2}, {durability:'soft'})
|
||||
rb: tbl.insert({ :id => 2, :a => 2 }, { :durability => 'soft' })
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
- cd: tbl.count()
|
||||
ot: 3
|
||||
|
||||
# Wrong durability insert
|
||||
- py: tbl.insert({'id':3, 'a':3}, durability='wrong')
|
||||
js: tbl.insert({id:3, a:3}, {durability:'wrong'})
|
||||
rb: tbl.insert({ :id => 3, :a => 3 }, { :durability => 'wrong' })
|
||||
ot: err('ReqlQueryLogicError', 'Durability option `wrong` unrecognized (options are "hard" and "soft").', [0])
|
||||
- cd: tbl.count()
|
||||
ot: 3
|
||||
|
||||
# Cleanup.
|
||||
- cd: tbl.get(2).delete()
|
||||
ot: {'deleted':1,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':0}
|
||||
|
||||
# Multi doc insert
|
||||
- cd: tbl.insert([{'id':2,'a':2}, {'id':3,'a':3}])
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':2}
|
||||
|
||||
# Stream insert
|
||||
- cd: tbl2.insert(tbl)
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':4}
|
||||
|
||||
# test pkey clash error
|
||||
- cd: tbl.insert({'id':2,'b':20})
|
||||
ot: {'first_error':"Duplicate primary key `id`:\n{\n\t\"a\":\t2,\n\t\"id\":\t2\n}\n{\n\t\"b\":\t20,\n\t\"id\":\t2\n}",'deleted':0,'replaced':0,'unchanged':0,'errors':1,'skipped':0,'inserted':0}
|
||||
|
||||
# test error conflict option (object exists)
|
||||
- py: tbl.insert({'id':2,'b':20}, conflict='error')
|
||||
js: tbl.insert({'id':2,'b':20}, {conflict:'error'})
|
||||
rb: tbl.insert({:id => 2, :b => 20}, { :conflict => 'error' })
|
||||
ot: {'first_error':"Duplicate primary key `id`:\n{\n\t\"a\":\t2,\n\t\"id\":\t2\n}\n{\n\t\"b\":\t20,\n\t\"id\":\t2\n}",'deleted':0,'replaced':0,'unchanged':0,'errors':1,'skipped':0,'inserted':0}
|
||||
|
||||
# test error conflict option (object doesn't exist)
|
||||
- py: tbl.insert({'id':15,'b':20}, conflict='error')
|
||||
js: tbl.insert({'id':15,'b':20}, {conflict:'error'})
|
||||
rb: tbl.insert({:id => 15, :b => 20}, { :conflict => 'error' })
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
|
||||
- cd: tbl.get(15)
|
||||
ot: {'id':15,'b':20}
|
||||
|
||||
# test replace conflict option (object exists)
|
||||
- py: tbl.insert({'id':2,'b':20}, conflict='replace')
|
||||
js: tbl.insert({'id':2,'b':20}, {conflict:'replace'})
|
||||
rb: tbl.insert({:id => 2, :b => 20}, { :conflict => 'replace' })
|
||||
ot: {'deleted':0,'replaced':1,'unchanged':0,'errors':0,'skipped':0,'inserted':0}
|
||||
|
||||
- cd: tbl.get(2)
|
||||
ot: {'id':2,'b':20}
|
||||
|
||||
# test replace conflict option (object doesn't exist)
|
||||
- py: tbl.insert({'id':20,'b':20}, conflict='replace')
|
||||
js: tbl.insert({'id':20,'b':20}, {conflict:'replace'})
|
||||
rb: tbl.insert({:id => 20, :b => 20}, { :conflict => 'replace' })
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
|
||||
- cd: tbl.get(20)
|
||||
ot: {'id':20,'b':20}
|
||||
|
||||
# test update conflict option (object exists)
|
||||
- py: tbl.insert({'id':2,'c':30}, conflict='update')
|
||||
js: tbl.insert({'id':2,'c':30}, {conflict:'update'})
|
||||
rb: tbl.insert({:id => 2, :c => 30}, { :conflict => 'update' })
|
||||
ot: {'deleted':0,'replaced':1,'unchanged':0,'errors':0,'skipped':0,'inserted':0}
|
||||
|
||||
- cd: tbl.get(2)
|
||||
ot: {'id':2, 'b':20, 'c':30}
|
||||
|
||||
# test update conflict option (object doesn't exist)
|
||||
- py: tbl.insert({'id':30,'b':20}, conflict='update')
|
||||
js: tbl.insert({'id':30,'b':20}, {conflict:'update'})
|
||||
rb: tbl.insert({:id => 30, :b => 20}, { :conflict => 'update' })
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
|
||||
- cd: tbl.get(30)
|
||||
ot: {'id':30,'b':20}
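
# Illustrative sketch, not part of the original test file: the three
# conflict modes tested above. Assumes an existing, initially empty table
# (the name is illustrative) and a local server on the default port.
from rethinkdb import RethinkDB

r = RethinkDB()
conn = r.connect()  # assumed local server, default port
tbl = r.db('test').table('example_tbl')   # illustrative table name

tbl.insert({'id': 2, 'b': 20}).run(conn)
# 'error' (the default) refuses duplicates, 'replace' overwrites the row,
# 'update' merges the new fields into the existing document.
print(tbl.insert({'id': 2, 'b': 21}, conflict='error').run(conn)['errors'])     # 1
print(tbl.insert({'id': 2, 'b': 22}, conflict='replace').run(conn)['replaced']) # 1
print(tbl.insert({'id': 2, 'c': 30}, conflict='update').run(conn)['replaced'])  # 1
print(tbl.get(2).run(conn))   # {'id': 2, 'b': 22, 'c': 30}
conn.close()
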
|
||||
|
||||
# test incorrect conflict option
|
||||
- py: tbl.insert({'id':3, 'a':3}, conflict='wrong')
|
||||
js: tbl.insert({id:3, a:3}, {conflict:'wrong'})
|
||||
rb: tbl.insert({ :id => 3, :a => 3 }, { :conflict => 'wrong' })
|
||||
ot: err('ReqlQueryLogicError', 'Conflict option `wrong` unrecognized (options are "error", "replace" and "update").', [0])
|
||||
|
||||
# test auto pkey generation
|
||||
- py: r.db('test').table_create('testpkey', primary_key='foo')
|
||||
js: r.db('test').tableCreate('testpkey', {primaryKey:'foo'})
|
||||
rb: r.db('test').table_create('testpkey', { :primary_key => 'foo' })
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
def: tblpkey = r.db('test').table('testpkey')
|
||||
|
||||
- cd: tblpkey.insert({})
|
||||
ot: {'deleted':0,'replaced':0,'generated_keys':arrlen(1,uuid()),'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
|
||||
- cd: tblpkey
|
||||
ot: [{'foo':uuid()}]
|
||||
|
||||
# test replace conflict pkey generation
|
||||
- py: tblpkey.insert({'b':20}, conflict='replace')
|
||||
js: tblpkey.insert({'b':20}, {conflict:'replace'})
|
||||
rb: tblpkey.insert({:b => 20}, { :conflict => 'replace' })
|
||||
ot: {'deleted':0,'replaced':0,'generated_keys':arrlen(1,uuid()),'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
|
||||
# test update conflict pkey generation
|
||||
- py: tblpkey.insert({'b':20}, conflict='update')
|
||||
js: tblpkey.insert({'b':20}, {conflict:'update'})
|
||||
rb: tblpkey.insert({:b => 20}, { :conflict => 'update' })
|
||||
ot: {'deleted':0,'replaced':0,'generated_keys':arrlen(1,uuid()),'unchanged':0,'errors':0,'skipped':0,'inserted':1}
|
||||
|
||||
- cd: r.db('test').table_drop('testpkey')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
||||
# Insert within for each
|
||||
- py: tbl.for_each(lambda row: tbl2.insert(row.merge({'id':row['id'] + 100 })) )
|
||||
js: tbl.forEach(function(row) { return tbl2.insert(row.merge({'id':row('id').add(100)})); })
|
||||
rb: tbl.for_each(proc { |row| tbl2.insert(row.merge({'id'=>row['id'] + 100 })) })
|
||||
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':7}
|
||||
|
||||
# Insert unwritable data
|
||||
- cd: tbl.insert({'value':r.minval})
|
||||
rb: tbl.insert({:value => r.minval})
|
||||
ot: partial({'errors':1,'first_error':'`r.minval` and `r.maxval` cannot be written to disk.'})
|
||||
|
||||
- cd: tbl.insert({'value':r.maxval})
|
||||
rb: tbl.insert({:value => r.maxval})
|
||||
ot: partial({'errors':1,'first_error':'`r.minval` and `r.maxval` cannot be written to disk.'})
|
||||
|
||||
# Crash 5683
|
||||
- py: tbl.insert([{'id':666}, {'id':666}], return_changes="always")
|
||||
ot: {'changes': [{'new_val': {'id': 666}, 'old_val': None},{'error': 'Duplicate primary key `id`:\n{\n\t"id":\t666\n}\n{\n\t"id":\t666\n}','new_val': {'id': 666},'old_val': {'id': 666}}],'deleted': 0,'errors': 1,'first_error': 'Duplicate primary key `id`:\n{\n\t"id":\t666\n}\n{\n\t"id":\t666\n}','inserted': 1,'replaced': 0,'skipped': 0,'unchanged': 0}
|
||||
|
||||
# Confirm inserts are ordered in return_changes always
|
||||
- py: tbl.insert([{'id':100+i, 'ordered-num':i} for i in range(1,100)], return_changes="always")
|
||||
ot: partial({'changes':[{'old_val': None, 'new_val': {'id': 100+i, 'ordered-num': i}} for i in range(1,100)] })
|
||||
|
||||
# Confirm inserts are ordered in return_changes always with complicated key
|
||||
- py: tbl.insert([{'id':[1, "blah", 200+i], 'ordered-num':i} for i in range(1,100)], return_changes="always")
|
||||
ot: partial({'changes':[{'old_val': None, 'new_val': {'id': [1,"blah", 200+i], 'ordered-num': i}} for i in range(1,100)] })
|
||||
|
||||
# Confirm inserts are also ordered in the changes array with return_changes=true
|
||||
- py: tbl.insert([{'id':[1, "blah", 300+i], 'ordered-num':i} for i in range(1,100)], return_changes=true)
|
||||
ot: partial({'changes':[{'old_val': None, 'new_val': {'id': [1,"blah", 300+i], 'ordered-num': i}} for i in range(1,100)] })
|
||||
|
||||
# Confirm errors are properly returned with return_changes="always"
|
||||
- py: tbl.insert([{'id':100 + i, 'ordered-num':i} for i in range(1,100)], return_changes="always")
|
||||
ot: partial({'changes':[{'old_val': {'id':100+i, 'ordered-num':i}, 'new_val': {'id':100+i, 'ordered-num':i}, 'error':'Duplicate primary key `id`:\n{\n\t"id":\t'+str(100+i)+',\n\t"ordered-num":\t'+str(i)+'\n}\n{\n\t"id":\t'+str(100+i)+',\n\t"ordered-num":\t'+str(i)+'\n}'} for i in range(1,100)]})
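The expected output above relies on the fact that with return_changes="always" every input document gets a corresponding entry in changes, including the failed ones. A minimal sketch of how a client might lean on that per-document ordering to pair failures with their inputs; it assumes the classic `rethinkdb` Python driver import style, and the connection details and table name are hypothetical:

import rethinkdb as r

conn = r.connect('localhost', 28015)          # hypothetical host/port
docs = [{'id': 100 + i, 'ordered-num': i} for i in range(1, 100)]

result = r.table('tbl').insert(docs, return_changes='always').run(conn)

# With return_changes='always', changes[i] corresponds to docs[i], so
# per-document errors can be matched back to the documents that caused them.
for doc, change in zip(docs, result['changes']):
    if 'error' in change:
        print(doc['id'], change['error'])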
|
||||
|
||||
# Trivial errors with return_changes="always", this has to be long test, testing order and message for this type
|
||||
- py: tbl.insert([{'id':123}, {'id':'a'*500}, {'id':321}], return_changes="always")
|
||||
ot: {'changes': [{'error': 'Duplicate primary key `id`:\n{\n\t"id":\t123,\n\t"ordered-num":\t23\n}\n{\n\t"id":\t123\n}', 'new_val': {'id': 123, 'ordered-num': 23}, 'old_val': {'id': 123, 'ordered-num': 23}}, {'error': 'Primary key too long (max 127 characters): "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"', 'new_val': None, 'old_val': None}, {'new_val': {'id': 321}, 'old_val': None}], 'deleted': 0, 'errors': 2, 'first_error': 'Primary key too long (max 127 characters): "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"', 'inserted': 1, 'replaced': 0, 'skipped': 0, 'unchanged': 0}
|
||||
|
||||
# No errors returned with return_changes=true
|
||||
- py: tbl.insert([{'id':100 + i, 'ordered-num':i} for i in range(1,100)], return_changes=true)
|
||||
ot: partial({'changes':[]})
|
||||
|
||||
- py: tbl.insert({'a':r.minval}, return_changes="always")
|
||||
ot: partial({'changes': [{'old_val': None, 'new_val': None, 'error': '`r.minval` and `r.maxval` cannot be written to disk.'}]})
|
||||
|
||||
# Tests for insert conflict resolution function
|
||||
|
||||
# Using a conflict function
|
||||
- cd: tbl.insert({'id':42, 'foo':1, 'bar':1})
|
||||
ot: partial({'inserted':1})
|
||||
- py: tbl.insert({'id':42, 'foo':5, 'bar':5}, conflict=lambda id, old_row, new_row: old_row.merge(new_row.pluck("bar")))
|
||||
ot: partial({'replaced':1})
|
||||
- py: tbl.get(42)
|
||||
ot: {'id':42, 'foo':1, 'bar':5}
|
||||
- rb: tbl.insert({:id=>42, :foo=>6, :bar=>6}, conflict: lambda {|id, old_row, new_row| return old_row.merge(new_row.pluck("bar"))})
|
||||
ot: partial({'replaced':1})
|
||||
- rb: tbl.get(42)
|
||||
ot: {'id':42, 'foo':1, 'bar':6}
|
||||
- js: tbl.insert({'id':42, 'foo':7, 'bar':7}, {conflict: function(id, old_row, new_row) {return old_row.merge(new_row.pluck("bar"))}})
|
||||
ot: partial({'replaced':1})
|
||||
- js: tbl.get(42)
|
||||
ot: {'id':42, 'foo':1, 'bar':7}
|
||||
|
||||
# Inserting and deleting an item
|
||||
- js: tbl.insert({id: "toggle"},{conflict: function(x,y,z) { return null},returnChanges: true})
|
||||
ot: partial({'inserted': 1})
|
||||
- js: tbl.insert({id: "toggle"},{conflict: function(x,y,z) { return null},returnChanges: true})
|
||||
ot: partial({'deleted': 1})
|
||||
|
||||
# Returning the wrong thing from the conflict function
|
||||
- py: tbl.insert({'id':42, 'foo':1, 'bar':1}, conflict=lambda a,b,c: 2)
|
||||
ot: partial({'first_error': 'Inserted value must be an OBJECT (got NUMBER):\n2'})
|
||||
|
||||
# Incorrect Arity
|
||||
- py: tbl.insert({'id':42}, conflict=lambda a,b: a)
|
||||
ot: err("ReqlQueryLogicError", "The conflict function passed to `insert` should expect 3 arguments.")
|
||||
|
||||
# Non atomic operation
|
||||
- py: tbl.insert({'id':42}, conflict=lambda a,b,c: tbl.get(42))
|
||||
ot: err("ReqlQueryLogicError", "The conflict function passed to `insert` must be deterministic.")
|
||||
|
||||
- py: tbl.insert({'id':42}, conflict=lambda a,b,c: {'id':42, 'num':'424'})
|
||||
ot: partial({'replaced': 1})
|
||||
- py: tbl.get(42)
|
||||
ot: {'id':42, 'num':'424'}
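The conflict-function tests above all use the same driver-level shape: a three-argument function passed as the `conflict` optarg whose return value becomes the stored document (or null to delete it). A hedged Python sketch of that pattern, assuming a server and driver recent enough to accept function conflicts and a hypothetical `users` table and connection:

import rethinkdb as r

conn = r.connect('localhost', 28015)  # hypothetical host/port

# The first write inserts; the second collides on the primary key and the
# conflict function keeps the stored row while adopting only 'bar' from the
# new document -- the same pluck/merge shape used in the tests above.
r.table('users').insert({'id': 42, 'foo': 1, 'bar': 1}).run(conn)

result = r.table('users').insert(
    {'id': 42, 'foo': 5, 'bar': 5},
    conflict=lambda id, old_row, new_row: old_row.merge(new_row.pluck('bar'))
).run(conn)

print(result['replaced'])                    # expected: 1
print(r.table('users').get(42).run(conn))    # {'id': 42, 'foo': 1, 'bar': 5}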
|
||||
|
||||
# Get unreturnable data
|
||||
- cd: r.minval
|
||||
ot: err('ReqlQueryLogicError','Cannot convert `r.minval` to JSON.')
|
||||
|
||||
- cd: r.maxval
|
||||
ot: err('ReqlQueryLogicError','Cannot convert `r.maxval` to JSON.')
|
||||
|
||||
# clean up
|
||||
- cd: r.db('test').table_drop('test2')
|
||||
ot: partial({'tables_dropped':1})
|
@ -1,110 +0,0 @@
|
||||
desc: Tests replacement of selections
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
# Set up some data
|
||||
|
||||
- py: tbl.insert([{'id':i} for i in xrange(100)])
|
||||
js: |
|
||||
tbl.insert(function(){
|
||||
var res = []
|
||||
for (var i = 0; i < 100; i++) {
|
||||
res.push({id:i});
|
||||
}
|
||||
return res;
|
||||
}())
|
||||
rb: tbl.insert((1..100).map{ |i| {:id => i } })
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':100})
|
||||
|
||||
- cd: tbl.count()
|
||||
ot: 100
|
||||
|
||||
# Identity
|
||||
|
||||
- py: tbl.get(12).replace(lambda row:{'id':row['id']})
|
||||
js: tbl.get(12).replace(function(row) { return {'id':row('id')}; })
|
||||
rb: tbl.get(12).replace{ |row| { :id => row[:id] } }
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'unchanged':1,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
# Replace single row
|
||||
|
||||
- py: tbl.get(12).replace(lambda row:{'id':row['id'], 'a':row['id']})
|
||||
js: tbl.get(12).replace(function(row) { return {'id':row('id'), 'a':row('id')}; })
|
||||
rb: tbl.get(12).replace{ |row| { :id => row[:id], :a => row[:id] } }
|
||||
ot: ({'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
- py: tbl.get(13).replace(lambda row:null)
|
||||
js: tbl.get(13).replace(function(row) { return null; })
|
||||
rb: tbl.get(13).replace{ |row| null }
|
||||
ot: ({'deleted':1,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
# Replace selection of table
|
||||
|
||||
- py: tbl.between(10, 20, right_bound='closed').replace(lambda row:{'a':1})
|
||||
js: tbl.between(10, 20, {'right_bound':'closed'}).replace(function(row) { return {'a':1}; })
|
||||
ot: ({'first_error':'Inserted object must have primary key `id`:\n{\n\t\"a\":\t1\n}','deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':10,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
- py: tbl.filter(lambda row:(row['id'] >= 10) & (row['id'] < 20)).replace(lambda row:{'id':row['id'], 'a':row['id']})
|
||||
js: tbl.filter(function(row) { return row('id').ge(10).and(row('id').lt(20))}).replace(function(row) { return {'id':row('id'), 'a':row('id')}; })
|
||||
rb: tbl.filter{ |row|
|
||||
(row[:id] >= 10).and(row[:id] < 20)
|
||||
}.replace{ |row|
|
||||
{ :id => row[:id], :a => row[:id] } }
|
||||
ot: ({'deleted':0.0,'replaced':8,'unchanged':1,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
# trying to change pkey of a document
|
||||
- cd: tbl.get(1).replace({'id':2,'a':1})
|
||||
ot: ({'first_error':"Primary key `id` cannot be changed (`{\n\t\"id\":\t1\n}` -> `{\n\t\"a\":\t1,\n\t\"id\":\t2\n}`).",'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':1,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
|
||||
# not passing a pkey in the first place
|
||||
- cd: tbl.get(1).replace({'a':1})
|
||||
ot: ({'first_error':"Inserted object must have primary key `id`:\n{\n\t\"a\":\t1\n}",'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':1,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
# check r.row, static value and otherwise
|
||||
- py: tbl.get(1).replace({'id':r.row['id'],'a':'b'})
|
||||
js: tbl.get(1).replace({'id':r.row('id'),'a':'b'})
|
||||
rb: tbl.get(1).replace{ |row| { :id => row[:id], :a => 'b' } }
|
||||
ot: ({'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
- cd: tbl.get(1).replace(r.row.merge({'a':'b'}))
|
||||
rb: tbl.get(1).replace{ |row| row.merge({'a':'b'}) }
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'unchanged':1,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
# test atomicity constraints
|
||||
- cd: tbl.get(1).replace(r.row.merge({'c':r.js('5')}))
|
||||
rb: tbl.get(1).replace{ |row| row.merge({'c':r.js('5')}) }
|
||||
ot: err('ReqlQueryLogicError', 'Could not prove argument deterministic. Maybe you want to use the non_atomic flag?', [0])
|
||||
|
||||
- cd: tbl.get(1).replace(r.row.merge({'c':tbl.nth(0)}))
|
||||
rb: tbl.get(1).replace{ |row| row.merge({'c':tbl.nth(0)}) }
|
||||
ot: err('ReqlQueryLogicError', 'Could not prove argument deterministic. Maybe you want to use the non_atomic flag?', [0])
|
||||
|
||||
- py: tbl.get(1).replace(r.row.merge({'c':r.js('5')}), non_atomic=True)
|
||||
js: tbl.get(1).replace(r.row.merge({'c':r.js('5')}), {'nonAtomic':true})
|
||||
rb: tbl.get(1).replace({ :non_atomic => true }){ |row| row.merge({ :c => r.js('5') })}
|
||||
ot: ({'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
- cd: tbl.get(1).replace({}, 'foo')
|
||||
ot:
|
||||
cd: err('ReqlCompileError', 'Expected 2 arguments but found 3.')
|
||||
js: err('ReqlCompileError', 'Expected 1 argument (not including options) but found 2.')
|
||||
|
||||
- cd: tbl.get(1).replace({}, {'foo':'bar'})
|
||||
py: tbl.get(1).replace({}, foo='bar')
|
||||
ot: err('ReqlCompileError', 'Unrecognized optional argument `foo`.')
|
||||
|
||||
# Replace whole table
|
||||
|
||||
- py: tbl.replace(lambda row:null)
|
||||
js: tbl.replace(function(row) { return null; })
|
||||
rb: tbl.replace{ |row| null }
|
||||
ot: ({'deleted':99,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0})
|
||||
|
||||
- cd: tbl.get('sdfjk').replace({'id':'sdfjk'})['inserted']
|
||||
js: tbl.get('sdfjk').replace({'id':'sdfjk'})('inserted')
|
||||
ot: 1
|
||||
- cd: tbl.get('sdfjki').replace({'id':'sdfjk'})['errors']
|
||||
js: tbl.get('sdfjki').replace({'id':'sdfjk'})('errors')
|
||||
ot: 1
|
||||
|
@ -1,52 +0,0 @@
|
||||
desc: Tests syncing tables
|
||||
tests:
|
||||
|
||||
# Set up our test tables
|
||||
- cd: r.db('test').table_create('test1')
|
||||
ot: partial({'tables_created':1})
|
||||
- cd: r.db('test').table_create('test1soft')
|
||||
ot: partial({'tables_created':1})
|
||||
- cd: r.db('test').table('test1soft').config().update({'durability':'soft'})
|
||||
ot: {'skipped':0, 'deleted':0, 'unchanged':0, 'errors':0, 'replaced':1, 'inserted':0}
|
||||
- def: tbl = r.db('test').table('test1')
|
||||
- def: tbl_soft = r.db('test').table('test1soft')
|
||||
- cd: tbl.index_create('x')
|
||||
ot: partial({'created':1})
|
||||
- cd: tbl.index_wait('x').pluck('index', 'ready')
|
||||
ot: [{'ready':True, 'index':'x'}]
|
||||
|
||||
# This is the only way one can use sync legally at the moment
|
||||
- cd: tbl.sync()
|
||||
ot: {'synced':1}
|
||||
- cd: tbl_soft.sync()
|
||||
ot: {'synced':1}
|
||||
- cd: tbl.sync()
|
||||
ot: {'synced':1}
|
||||
runopts:
|
||||
durability: "soft"
|
||||
- cd: tbl.sync()
|
||||
ot: {'synced':1}
|
||||
runopts:
|
||||
durability: "hard"
|
||||
|
||||
# between() yields a TABLE_SLICE rather than a TABLE, so sync should still fail (it would make little sense anyway)
|
||||
- cd: tbl.between(1, 2).sync()
|
||||
ot:
|
||||
cd: err('ReqlQueryLogicError', 'Expected type TABLE but found TABLE_SLICE:', [1])
|
||||
py: err('AttributeError', "'Between' object has no attribute 'sync'")
|
||||
|
||||
# These are not even tables; sync should fail with a different error message
|
||||
- cd: r.expr(1).sync()
|
||||
ot:
|
||||
cd: err("ReqlQueryLogicError", 'Expected type TABLE but found DATUM:', [1])
|
||||
py: err('AttributeError', "'Datum' object has no attribute 'sync'")
|
||||
- js: tbl.order_by({index:'x'}).sync()
|
||||
rb: tbl.order_by({:index => 'soft'}).sync()
|
||||
ot: err("ReqlQueryLogicError", 'Expected type TABLE but found TABLE_SLICE:', [1])
|
||||
|
||||
# clean up
|
||||
- cd: r.db('test').table_drop('test1')
|
||||
ot: partial({'tables_dropped':1})
|
||||
- cd: r.db('test').table_drop('test1soft')
|
||||
ot: partial({'tables_dropped':1})
|
||||
|
@ -1,170 +0,0 @@
|
||||
desc: Tests updates of selections
|
||||
table_variable_name: tbl, tbl2
|
||||
tests:
|
||||
|
||||
# Set up some data
|
||||
- py: tbl.insert([{'id':i} for i in xrange(100)])
|
||||
js: |
|
||||
tbl.insert(function(){
|
||||
var res = []
|
||||
for (var i = 0; i < 100; i++) {
|
||||
res.push({id:i});
|
||||
}
|
||||
return res;
|
||||
}())
|
||||
rb: tbl.insert((0...100).map{ |i| { :id => i } })
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':100})
|
||||
|
||||
- cd: tbl.count()
|
||||
ot: 100
|
||||
|
||||
- py: tbl2.insert([{'id':i, 'foo':{'bar':i}} for i in xrange(100)])
|
||||
js: |
|
||||
tbl2.insert(function(){
|
||||
var res = []
|
||||
for (var i = 0; i < 100; i++) {
|
||||
res.push({id:i,foo:{bar:i}});
|
||||
}
|
||||
return res;
|
||||
}())
|
||||
rb: tbl2.insert((0...100).map{ |i| { :id => i, :foo => { :bar => i } } })
|
||||
ot: ({'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':100})
|
||||
|
||||
- cd: tbl2.count()
|
||||
ot: 100
|
||||
|
||||
# Identity
|
||||
- py: tbl.get(12).update(lambda row:row)
|
||||
js: tbl.get(12).update(function(row) { return row; })
|
||||
rb: tbl.get(12).update{ |row| row}
|
||||
ot: {'deleted':0.0,'replaced':0.0,'unchanged':1,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# Soft durability point update
|
||||
- py: tbl.get(12).update(lambda row:{'a':row['id'] + 1}, durability='soft')
|
||||
js: tbl.get(12).update(function(row) { return {'a':row('id').add(1)}; }, {durability:'soft'})
|
||||
rb: tbl.get(12).update({ :durability => 'soft' }) { |row| { :a => row[:id] + 1 } }
|
||||
ot: {'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- cd: tbl.get(12)
|
||||
ot: {'id':12, 'a':13}
|
||||
|
||||
# Hard durability point update
|
||||
- py: tbl.get(12).update(lambda row:{'a':row['id'] + 2}, durability='hard')
|
||||
js: tbl.get(12).update(function(row) { return {'a':row('id').add(2)}; }, {durability:'hard'})
|
||||
rb: tbl.get(12).update({ :durability => 'hard' }) { |row| { :a => row[:id] + 2 } }
|
||||
ot: {'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- cd: tbl.get(12)
|
||||
ot: {'id':12, 'a':14}
|
||||
|
||||
# Wrong durability point update
|
||||
- py: tbl.get(12).update(lambda row:{'a':row['id'] + 3}, durability='wrong')
|
||||
js: tbl.get(12).update(function(row) { return {'a':row('id').add(3)}; }, {durability:'wrong'})
|
||||
rb: tbl.get(12).update({ :durability => 'wrong' }) { |row| { :a => row[:id] + 3 } }
|
||||
ot: err('ReqlQueryLogicError', 'Durability option `wrong` unrecognized (options are "hard" and "soft").', [0])
|
||||
|
||||
- cd: tbl.get(12)
|
||||
ot: {'id':12, 'a':14}
|
||||
|
||||
# Point update
|
||||
- py: tbl.get(12).update(lambda row:{'a':row['id']})
|
||||
js: tbl.get(12).update(function(row) { return {'a':row('id')}; })
|
||||
rb: tbl.get(12).update{ |row| { :a => row[:id] } }
|
||||
ot: {'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- cd: tbl.get(12)
|
||||
ot: {'id':12, 'a':12}
|
||||
|
||||
# undo the point update
|
||||
- cd: tbl.get(12).update({'a':r.literal()})
|
||||
ot: {'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# Update selection of table
|
||||
|
||||
- py: tbl.between(10, 20).update(lambda row:{'a':row['id']})
|
||||
js: tbl.between(10, 20).update(function(row) { return {'a':row('id')}; })
|
||||
rb: tbl.between(10, 20).update{ |row| { :a => row[:id] } }
|
||||
ot: {'deleted':0.0,'replaced':10,'unchanged':0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- py: tbl.filter(lambda row:(row['id'] >= 10) & (row['id'] < 20)).update(lambda row:{'a':row['id']})
|
||||
js: tbl.filter(function(row) { return row('id').ge(10).and(row('id').lt(20))}).update(function(row) { return {'a':row('id')}; })
|
||||
rb: tbl.filter{ |row| (row[:id] >= 10).and(row[:id] < 20) }.update{ |row| { :a => row[:id] } }
|
||||
ot: {'deleted':0.0,'replaced':0.0,'unchanged':10,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- py: tbl.filter(lambda row:(row['id'] >= 10) & (row['id'] < 20)).update(lambda row:{'b':row['id']})
|
||||
js: tbl.filter(function(row) { return row('id').ge(10).and(row('id').lt(20))}).update(function(row) { return {'b':row('id')}; })
|
||||
rb: tbl.filter{ |row| (row[:id] >= 10).and(row[:id] < 20) }.update{ |row| { :b => row[:id] } }
|
||||
ot: {'deleted':0.0,'replaced':10,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# now undo that update
|
||||
- cd: tbl.between(10, 20).update({'a':r.literal()})
|
||||
ot: {'deleted':0.0,'replaced':10,'unchanged':0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# trying to change pkey of a document
|
||||
- cd: tbl.get(1).update({'id':2,'d':1})
|
||||
ot: {'first_error':"Primary key `id` cannot be changed (`{\n\t\"id\":\t1\n}` -> `{\n\t\"d\":\t1,\n\t\"id\":\t2\n}`).",'deleted':0.0,'replaced':0.0,'unchanged':0.0,'errors':1,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# check r.row, static value and otherwise
|
||||
- py: tbl.get(1).update({'id':r.row['id'],'d':'b'})
|
||||
js: tbl.get(1).update({'id':r.row('id'),'d':'b'})
|
||||
rb: tbl.get(1).update{ |row| { :id => row[:id], :d => 'b' } }
|
||||
ot: {'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# We probably don't need the merge here; this just exercises r.row to be safe
|
||||
- cd: tbl.get(1).update(r.row.merge({'d':'b'}))
|
||||
rb: tbl.get(1).update{ |row| row.merge({'d':'b'}) }
|
||||
ot: {'deleted':0.0,'replaced':0.0,'unchanged':1,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# test atomicity constraints (positive and negative test)
|
||||
- cd: tbl.get(1).update({'d':r.js('5')})
|
||||
ot: err('ReqlQueryLogicError', 'Could not prove argument deterministic. Maybe you want to use the non_atomic flag?', [0])
|
||||
|
||||
- cd: tbl.get(1).update({'d':tbl.nth(0)})
|
||||
ot: err('ReqlQueryLogicError', 'Could not prove argument deterministic. Maybe you want to use the non_atomic flag?', [0])
|
||||
|
||||
- py: tbl.get(1).update({'d':r.js('5')}, non_atomic=True)
|
||||
js: tbl.get(1).update({'d':r.js('5')}, {'nonAtomic':true})
|
||||
rb: tbl.get(1).update({ :d => r.js('5') }, { :non_atomic => true })
|
||||
ot: {'deleted':0.0,'replaced':1,'unchanged':0.0,'errors':0.0,'skipped':0.0,'inserted':0.0}
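These atomicity cases capture the rule: a write whose argument cannot be proven deterministic (here r.js('5')) is rejected unless the caller opts out with the non-atomic flag. A minimal Python sketch of both outcomes; the connection and table names are hypothetical, and it assumes the driver exposes the Reql* error classes:

import rethinkdb as r
from rethinkdb.errors import ReqlQueryLogicError

conn = r.connect('localhost', 28015)  # hypothetical host/port

try:
    # Rejected: the server cannot prove r.js() deterministic.
    r.table('tbl').get(1).update({'d': r.js('5')}).run(conn)
except ReqlQueryLogicError as err:
    print('refused:', err)

# Accepted once the caller explicitly gives up the atomicity guarantee.
r.table('tbl').get(1).update({'d': r.js('5')}, non_atomic=True).run(conn)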
|
||||
|
||||
- js: tbl.get(1).update({}, 'foo')
|
||||
ot: err('ReqlCompileError', 'Expected 1 argument (not including options) but found 2.')
|
||||
|
||||
- js: tbl.get(1).update({}, {'foo':'bar'})
|
||||
ot: err('ReqlCompileError', 'Unrecognized optional argument `foo`.')
|
||||
|
||||
# Update whole table
|
||||
- py: tbl.update(lambda row:{'a':row['id']})
|
||||
js: tbl.update(function(row) { return {'a':row('id')}; })
|
||||
rb: tbl.update{ |row| { :a => row['id'] } }
|
||||
ot: {'deleted':0.0,'replaced':100,'unchanged':0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# undo the update on the whole table
|
||||
- cd: tbl.update({'a':r.literal()})
|
||||
ot: {'deleted':0.0,'replaced':100,'unchanged':0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
# recursive merge
|
||||
- cd: tbl2.update({'foo':{'bar':2}})
|
||||
ot: {'deleted':0.0,'replaced':99,'unchanged':1,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- cd: tbl2.update({'foo':r.literal({'bar':2})})
|
||||
ot: {'deleted':0.0,'replaced':0,'unchanged':100,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- rb: tbl2.update{|row| {'foo':r.literal({'bar':2})}}
|
||||
ot: {'deleted':0.0,'replaced':0,'unchanged':100,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- cd: tbl2.order_by('id').nth(0)
|
||||
ot: {'id':0,'foo':{'bar':2}}
|
||||
|
||||
- cd: tbl2.update({'foo':{'buzz':2}})
|
||||
ot: {'deleted':0.0,'replaced':100,'unchanged':0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- cd: tbl2.order_by('id').nth(0)
|
||||
ot: {'id':0,'foo':{'bar':2,'buzz':2}}
|
||||
|
||||
- cd: tbl2.update({'foo':r.literal(1)})
|
||||
ot: {'deleted':0.0,'replaced':100,'unchanged':0,'errors':0.0,'skipped':0.0,'inserted':0.0}
|
||||
|
||||
- cd: tbl2.order_by('id').nth(0)
|
||||
ot: {'id':0,'foo':1}
|
||||
|
@ -1,164 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os, re, sys
|
||||
|
||||
# == globals
|
||||
|
||||
printDebug = False
|
||||
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = str
|
||||
|
||||
# ==
|
||||
|
||||
class yamlValue(unicode):
|
||||
linenumber = None
|
||||
def __new__(cls, value, linenumber=None):
|
||||
if isinstance(value, unicode):
|
||||
real = unicode.__new__(cls, value)
|
||||
else:
|
||||
real = unicode.__new__(cls, value, "utf-8")
|
||||
if linenumber is not None:
|
||||
real.linenumber = int(linenumber)
|
||||
return real
|
||||
|
||||
def __repr__(self):
|
||||
real = super(yamlValue, self).__repr__()
|
||||
return real.lstrip('u')
|
||||
|
||||
def parseYAML(source):
|
||||
|
||||
def debug(message):
|
||||
if printDebug and message:
|
||||
message = str(message).rstrip()
|
||||
if message:
|
||||
print(message)
|
||||
sys.stdout.flush()
|
||||
|
||||
commentLineRegex = re.compile(r'^\s*#')
yamlLineRegex = re.compile(r'^(?P<indent> *)((?P<itemMarker>- +)(?P<itemContent>.*)|((?P<key>[\w\.]+)(?P<keyExtra>: *))?(?P<content>.*))\s*$')
|
||||
|
||||
def parseYAML_inner(source, indent):
|
||||
returnItem = None
|
||||
|
||||
for linenumber, line in source:
|
||||
if line == '': # no newline, so EOF
|
||||
break
|
||||
|
||||
debug('line %d (%d):%s' % (linenumber, indent, line))
|
||||
|
||||
if line.strip() == '' or commentLineRegex.match(line): # empty or comment line, ignore
|
||||
debug('\tempty/comment line')
|
||||
continue
|
||||
|
||||
# - parse line
|
||||
|
||||
parsedLine = yamlLineRegex.match(line)
|
||||
if not parsedLine:
|
||||
raise Exception('Unparseable YAML line %d: %s' % (linenumber, line.rstrip()))
|
||||
|
||||
lineIndent = len(parsedLine.group('indent'))
|
||||
lineItemMarker = parsedLine.group('itemMarker')
|
||||
lineKey = parsedLine.group('key') or ''
|
||||
lineKeyExtra = parsedLine.group('keyExtra') or ''
|
||||
lineContent = (parsedLine.group('content') or parsedLine.group('itemContent') or '').strip()
|
||||
|
||||
# - handle end-of-sections
|
||||
if lineIndent < indent:
|
||||
# we have dropped out of this item, push back the line and return what we have
|
||||
source.send((linenumber, line))
|
||||
debug('\tout one level')
|
||||
return returnItem
|
||||
|
||||
# - array item
|
||||
if lineItemMarker:
|
||||
debug('\tarray item')
|
||||
# item in an array
|
||||
if returnItem is None:
|
||||
debug('\tnew array, indent is %d' % lineIndent)
|
||||
returnItem = []
|
||||
indent = lineIndent
|
||||
elif not isinstance(returnItem, list):
|
||||
raise Exception('Bad YAML, got a list item while working on a %s on line %d: %s' % (returnItem.__class__.__name__, linenumber, line.rstrip()))
|
||||
indentLevel = lineIndent + len(lineItemMarker)
|
||||
source.send((linenumber, (' ' * (indentLevel) )+ lineContent))
|
||||
returnItem += [parseYAML_inner(source=source, indent=indent + 1)]
|
||||
|
||||
# - dict item
|
||||
elif lineKey:
|
||||
debug('\tdict item')
|
||||
if returnItem is None:
|
||||
debug('\tnew dict, indent is %d' % lineIndent)
|
||||
# new dict
|
||||
returnItem = {}
|
||||
indent = lineIndent
|
||||
elif not isinstance(returnItem, dict):
|
||||
raise Exception('Bad YAML, got a dict value while working on a %s on line %d: %s' % (returnItem.__class__.__name__, linenumber, line.rstrip()))
|
||||
indentLevel = lineIndent + len(lineKey) + len(lineKeyExtra)
|
||||
source.send((linenumber, (' ' * indentLevel) + lineContent))
|
||||
returnItem[lineKey] = parseYAML_inner(source=source, indent=indent + 1)
|
||||
|
||||
# - data - one or more lines of text
|
||||
else:
|
||||
debug('\tvalue')
|
||||
if returnItem is None:
|
||||
returnItem = yamlValue('', linenumber)
|
||||
if lineContent.strip() in ('|', '|-', '>'):
|
||||
continue # yaml multiline marker
|
||||
elif not isinstance(returnItem, yamlValue):
|
||||
raise Exception('Bad YAML, got a value while working on a %s on line %d: %s' % (returnItem.__class__.__name__, linenumber, line.rstrip()))
|
||||
if returnItem:
|
||||
returnItem = yamlValue(returnItem + "\n" + lineContent, returnItem.linenumber) # str subclasses are not fun
|
||||
else:
|
||||
returnItem = yamlValue(lineContent, linenumber)
|
||||
return returnItem
|
||||
|
||||
def parseYAML_generator(source):
|
||||
if hasattr(source, 'capitalize'):
|
||||
if os.path.isfile(source):
|
||||
source = open(source, 'r')
|
||||
else:
|
||||
source = source.splitlines(True)
|
||||
elif hasattr(source, 'readlines'):
|
||||
pass # the for loop will already work
|
||||
|
||||
backlines = []
|
||||
for linenumber, line in enumerate(source):
|
||||
backline = None
|
||||
usedLine = False
|
||||
while usedLine is False or backlines:
|
||||
if backlines:
|
||||
backline = yield backlines.pop()
|
||||
else:
|
||||
usedLine = True
|
||||
backline = yield (linenumber + 1, line)
|
||||
while backline: # loops returning None for every send()
|
||||
assert isinstance(backline, tuple)
|
||||
assert isinstance(backline[0], int)
|
||||
backlines.append(backline)
|
||||
backline = yield None
|
||||
|
||||
return parseYAML_inner(parseYAML_generator(source), indent=0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
import optparse, pprint
|
||||
|
||||
parser = optparse.OptionParser()
|
||||
parser.add_option("-d", "--debug", dest="debug", action="store_true", default=False, help="print debug information")
|
||||
(options, args) = parser.parse_args()
|
||||
printDebug = options.debug
|
||||
|
||||
if len(args) < 1:
|
||||
parser.error('%s needs files to process' % os.path.basename(__file__))
|
||||
|
||||
for filePath in args:
|
||||
if not os.path.isfile(filePath):
|
||||
sys.exit('target is not an existing file: %s' % os.path.basename(__file__))
|
||||
|
||||
for filePath in args:
|
||||
print('=== %s' % filePath)
|
||||
pprint.pprint(parseYAML(filePath))
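Besides the CLI entry point above, parseYAML also accepts a raw string (the capitalize/readlines checks distinguish file paths, inline text, and file objects), which is convenient when unit-testing the parser itself. A small sketch of that in-memory use; the module name yaml_parser is hypothetical and just stands for the script above:

from yaml_parser import parseYAML  # hypothetical module name for the script above

doc = parseYAML(
    "desc: example\n"
    "tests:\n"
    "  - cd: r.expr(1)\n"
    "    ot: 1\n"
)

# The top level comes back as a dict; leaf values are yamlValue strings
# that remember the source line they came from.
print(doc['desc'], doc['desc'].linenumber)           # example 1
print(doc['tests'][0]['cd'], doc['tests'][0]['ot'])  # r.expr(1) 1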
|
@ -1,33 +0,0 @@
|
||||
desc: Tests data manipulation in tables
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
- def: obj = r.expr({'id':0,'a':0})
|
||||
|
||||
- py: tbl.insert([{'id':i, 'a':i} for i in xrange(3)])
|
||||
js: |
|
||||
tbl.insert(function(){
|
||||
var res = []
|
||||
for (var i = 0; i < 3; i++) {
|
||||
res.push({id:i, 'a':i});
|
||||
}
|
||||
return res;
|
||||
}())
|
||||
rb: tbl.insert((0..2).map{ |i| { :id => i, :a => i } })
|
||||
ot: ({'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':3})
|
||||
|
||||
# Polymorphism
|
||||
- cd:
|
||||
- tbl.merge({'c':1}).nth(0)
|
||||
- obj.merge({'c':1})
|
||||
ot: ({'id':0,'c':1,'a':0})
|
||||
|
||||
- cd:
|
||||
- tbl.without('a').nth(0)
|
||||
- obj.without('a')
|
||||
ot: ({'id':0})
|
||||
|
||||
- cd:
|
||||
- tbl.pluck('a').nth(0)
|
||||
- obj.pluck('a')
|
||||
ot: ({'a':0})
|
@ -1,180 +0,0 @@
|
||||
desc: Tests randomization functions
|
||||
tests:
|
||||
|
||||
# Test sample
|
||||
- cd: r.expr([1,2,3]).sample(3).distinct().count()
|
||||
ot: 3
|
||||
- cd: r.expr([1,2,3]).sample(3).count()
|
||||
ot: 3
|
||||
- cd: r.expr([1,2,3,4,5,6]).sample(3).distinct().count()
|
||||
ot: 3
|
||||
- cd: r.expr([1,2,3]).sample(4).distinct().count()
|
||||
ot: 3
|
||||
- rb: r.expr([[1,2,3], 2]).do{|x| x[0].sample(x[1])}.distinct().count()
|
||||
ot: 2
|
||||
- cd: r.expr([1,2,3]).sample(-1)
|
||||
ot: err('ReqlQueryLogicError', 'Number of items to sample must be non-negative, got `-1`.', [0])
|
||||
- cd: r.expr(1).sample(1)
|
||||
ot: err('ReqlQueryLogicError', 'Cannot convert NUMBER to SEQUENCE', [0])
|
||||
- cd: r.expr({}).sample(1)
|
||||
ot: err('ReqlQueryLogicError', 'Cannot convert OBJECT to SEQUENCE', [0])
|
||||
|
||||
# Test r.random with floating-point values
|
||||
# These expressions should be equivalent
|
||||
- py:
|
||||
- r.random().do(lambda x:r.and_(x.ge(0), x.lt(1)))
|
||||
- r.random(1, float=True).do(lambda x:r.and_(x.ge(0), x.lt(1)))
|
||||
- r.random(0, 1, float=True).do(lambda x:r.and_(x.ge(0), x.lt(1)))
|
||||
- r.random(1, 0, float=True).do(lambda x:r.and_(x.le(1), x.gt(0)))
|
||||
- r.random(r.expr(0), 1, float=True).do(lambda x:r.and_(x.ge(0), x.lt(1)))
|
||||
- r.random(1, r.expr(0), float=True).do(lambda x:r.and_(x.le(1), x.gt(0)))
|
||||
- r.random(r.expr(1), r.expr(0), float=True).do(lambda x:r.and_(x.le(1), x.gt(0)))
|
||||
ot: True
|
||||
|
||||
# Single-argument
|
||||
- py:
|
||||
- r.random(0.495, float=True).do(lambda x:r.and_(x.ge(0), x.lt(0.495)))
|
||||
- r.random(-0.495, float=True).do(lambda x:r.and_(x.le(0), x.gt(-0.495)))
|
||||
- r.random(1823756.24, float=True).do(lambda x:r.and_(x.ge(0), x.lt(1823756.24)))
|
||||
- r.random(-1823756.24, float=True).do(lambda x:r.and_(x.le(0), x.gt(-1823756.24)))
|
||||
ot: True
|
||||
|
||||
# Non-zero-based random numbers
|
||||
- py:
|
||||
- r.random(10.5, 20.153, float=True).do(lambda x:r.and_(x.ge(10.5), x.lt(20.153)))
|
||||
- r.random(20.153, 10.5, float=True).do(lambda x:r.and_(x.le(20.153), x.gt(10.5)))
|
||||
- r.random(31415926.1, 31415926, float=True).do(lambda x:r.and_(x.le(31415926.1), x.gt(31415926)))
|
||||
ot: True
|
||||
|
||||
# Negative random numbers
|
||||
- py:
|
||||
- r.random(-10.5, 20.153, float=True).do(lambda x:r.and_(x.ge(-10.5), x.lt(20.153)))
|
||||
- r.random(-20.153, -10.5, float=True).do(lambda x:r.and_(x.ge(-20.153), x.lt(-10.5)))
|
||||
- r.random(-31415926, -31415926.1, float=True).do(lambda x:r.and_(x.le(-31415926), x.gt(-31415926.1)))
|
||||
ot: True
|
||||
|
||||
# There is a very small chance of collision here
|
||||
- py:
|
||||
- r.expr([r.random(), r.random()]).distinct().count()
|
||||
- r.expr([r.random(1, float=True), r.random(1, float=True)]).distinct().count()
|
||||
- r.expr([r.random(0, 1, float=True), r.random(0, 1, float=True)]).distinct().count()
|
||||
ot: 2
|
||||
|
||||
# Zero range random
|
||||
- py:
|
||||
- r.random(0, float=True).eq(0)
|
||||
- r.random(5, 5, float=True).eq(5)
|
||||
- r.random(-499384756758, -499384756758, float=True).eq(-499384756758)
|
||||
- r.random(-93.94757, -93.94757, float=True).eq(-93.94757)
|
||||
- r.random(294.69148, 294.69148, float=True).eq(294.69148)
|
||||
ot: True
|
||||
|
||||
# Test limits of doubles
|
||||
- def:
|
||||
py: float_max = sys.float_info.max
|
||||
js: float_max = Number.MAX_VALUE
|
||||
rb: float_max = Float::MAX
|
||||
- def:
|
||||
py: float_min = sys.float_info.min
|
||||
js: float_min = Number.MIN_VALUE
|
||||
rb: float_min = Float::MIN
|
||||
- py:
|
||||
- r.random(-float_max, float_max, float=True).do(lambda x:r.and_(x.ge(-float_max), x.lt(float_max)))
|
||||
- r.random(float_max, -float_max, float=True).do(lambda x:r.and_(x.le(float_max), x.gt(-float_max)))
|
||||
- r.random(float_min, float_max, float=True).do(lambda x:r.and_(x.ge(float_min), x.lt(float_max)))
|
||||
- r.random(float_min, -float_max, float=True).do(lambda x:r.and_(x.le(float_min), x.gt(-float_max)))
|
||||
- r.random(-float_min, float_max, float=True).do(lambda x:r.and_(x.ge(-float_min), x.lt(float_max)))
|
||||
- r.random(-float_min, -float_max, float=True).do(lambda x:r.and_(x.le(-float_min), x.gt(-float_max)))
|
||||
ot: True
|
||||
|
||||
# Test r.random with integer values
|
||||
- def:
|
||||
py: upper_limit = 2**53 - 1
|
||||
js: upper_limit = Math.pow(2,53) - 1
|
||||
rb: upper_limit = 2**53 - 1
|
||||
- def:
|
||||
py: lower_limit = 1 - (2**53)
|
||||
js: lower_limit = 1 - Math.pow(2,53)
|
||||
rb: lower_limit = 1 - (2**53)
|
||||
# These expressions should be equivalent
|
||||
- py:
|
||||
- r.random(256).do(lambda x:r.and_(x.ge(0), x.lt(256)))
|
||||
- r.random(0, 256).do(lambda x:r.and_(x.ge(0), x.lt(256)))
|
||||
- r.random(r.expr(256)).do(lambda x:r.and_(x.ge(0), x.lt(256)))
|
||||
- r.random(r.expr(0), 256).do(lambda x:r.and_(x.ge(0), x.lt(256)))
|
||||
- r.random(0, r.expr(256)).do(lambda x:r.and_(x.ge(0), x.lt(256)))
|
||||
- r.random(r.expr(0), r.expr(256)).do(lambda x:r.and_(x.ge(0), x.lt(256)))
|
||||
ot: True
|
||||
|
||||
# Non-zero-based random numbers
|
||||
- py:
|
||||
- r.random(10, 20).do(lambda x:r.and_(x.ge(10), x.lt(20)))
|
||||
- r.random(9347849, 120937493).do(lambda x:r.and_(x.ge(9347849), x.lt(120937493)))
|
||||
js:
|
||||
- r.random(10, 20).do(function(x){return r.and(x.ge(10), x.lt(20))})
|
||||
- r.random(9347849, 120937493).do(function(x){return r.and(x.ge(9347849), x.lt(120937493))})
|
||||
rb:
|
||||
- r.random(10, 20).do{|x| r.and(x.ge(10), x.lt(20))}
|
||||
- r.random(9347849, 120937493).do{|x| r.and(x.ge(9347849), x.lt(120937493))}
|
||||
ot: True
|
||||
|
||||
# Negative random numbers
|
||||
- py:
|
||||
- r.random(-10, 20).do(lambda x:r.and_(x.ge(-10), x.lt(20)))
|
||||
- r.random(-20, -10).do(lambda x:r.and_(x.ge(-20), x.lt(-10)))
|
||||
- r.random(-120937493, -9347849).do(lambda x:r.and_(x.ge(-120937493), x.lt(-9347849)))
|
||||
js:
|
||||
- r.random(-10, 20).do(function(x){return r.and(x.ge(-10), x.lt(20))})
|
||||
- r.random(-20, -10).do(function(x){return r.and(x.ge(-20), x.lt(-10))})
|
||||
- r.random(-120937493, -9347849).do(function(x){return r.and(x.ge(-120937493), x.lt(-9347849))})
|
||||
rb:
|
||||
- r.random(-10, 20).do{|x| r.and(x.ge(-10), x.lt(20))}
|
||||
- r.random(-20, -10).do{|x| r.and(x.ge(-20), x.lt(-10))}
|
||||
- r.random(-120937493, -9347849).do{|x| r.and(x.ge(-120937493), x.lt(-9347849))}
|
||||
ot: True
|
||||
|
||||
# There is a very small chance of collision here
|
||||
- cd: r.expr([r.random(upper_limit), r.random(upper_limit)]).distinct().count()
|
||||
ot: 2
|
||||
- py: r.expr([upper_limit,upper_limit]).map(lambda x:r.random(x)).distinct().count()
|
||||
js: r.expr([upper_limit,upper_limit]).map(function(x){return r.random(x)}).distinct().count()
|
||||
rb: r.expr([upper_limit,upper_limit]).map{|x| r.random(x)}.distinct().count()
|
||||
ot: 2
|
||||
|
||||
# Error cases
|
||||
|
||||
# Non-integer limits
|
||||
- cd: r.random(-0.5)
|
||||
ot: err("ReqlQueryLogicError", "Upper bound (-0.5) could not be safely converted to an integer.", [])
|
||||
- cd: r.random(0.25)
|
||||
ot: err("ReqlQueryLogicError", "Upper bound (0.25) could not be safely converted to an integer.", [])
|
||||
- cd: r.random(-10, 0.75)
|
||||
ot: err("ReqlQueryLogicError", "Upper bound (0.75) could not be safely converted to an integer.", [])
|
||||
- cd: r.random(-120549.25, 39458)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (-120549.25) could not be safely converted to an integer.", [])
|
||||
- cd: r.random(-6.5, 8.125)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (-6.5) could not be safely converted to an integer.", [])
|
||||
|
||||
# Forced integer random with no bounds
|
||||
- py: r.random(float=False)
|
||||
js: r.random({float:false})
|
||||
rb: r.random({:float => false})
|
||||
ot: err("ReqlQueryLogicError", "Generating a random integer requires one or two bounds.", [])
|
||||
|
||||
# Lower bound not less than upper bound
|
||||
- cd: r.random(0)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (0) is not less than upper bound (0).", [])
|
||||
- cd: r.random(0, 0)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (0) is not less than upper bound (0).", [])
|
||||
- cd: r.random(515, 515)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (515) is not less than upper bound (515).", [])
|
||||
- cd: r.random(-956, -956)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (-956) is not less than upper bound (-956).", [])
|
||||
- cd: r.random(-10)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (0) is not less than upper bound (-10).", [])
|
||||
- cd: r.random(20, 2)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (20) is not less than upper bound (2).", [])
|
||||
- cd: r.random(2, -20)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (2) is not less than upper bound (-20).", [])
|
||||
- cd: r.random(1456, 0)
|
||||
ot: err("ReqlQueryLogicError", "Lower bound (1456) is not less than upper bound (0).", [])
|
@ -1,53 +0,0 @@
|
||||
desc: Tests RQL range generation
|
||||
tests:
|
||||
- cd: r.range().type_of()
|
||||
ot: 'STREAM'
|
||||
|
||||
- cd: r.range().limit(4)
|
||||
ot: [0, 1, 2, 3]
|
||||
|
||||
- cd: r.range(4)
|
||||
ot: [0, 1, 2, 3]
|
||||
|
||||
- cd: r.range(2, 5)
|
||||
ot: [2, 3, 4]
|
||||
|
||||
- cd: r.range(0)
|
||||
ot: []
|
||||
|
||||
- cd: r.range(5, 2)
|
||||
ot: []
|
||||
|
||||
- cd: r.range(-5, -2)
|
||||
ot: [-5, -4, -3]
|
||||
|
||||
- cd: r.range(-5, 2)
|
||||
ot: [-5, -4, -3, -2, -1, 0, 1]
|
||||
|
||||
- cd: r.range(2, 5, 8)
|
||||
ot: err("ReqlCompileError", "Expected between 0 and 2 arguments but found 3.", [])
|
||||
|
||||
- cd: r.range("foo")
|
||||
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [])
|
||||
|
||||
# Using 9007199254740994 instead of 9007199254740993 due to #2157
|
||||
- cd: r.range(9007199254740994)
|
||||
ot: err_regex("ReqlQueryLogicError", "Number not an integer \\(>2\\^53\\). 9007199254740994", [])
|
||||
|
||||
- cd: r.range(-9007199254740994)
|
||||
ot: err_regex("ReqlQueryLogicError", "Number not an integer \\(<-2\\^53\\). -9007199254740994", [])
|
||||
|
||||
- cd: r.range(0.5)
|
||||
ot: err_regex("ReqlQueryLogicError", "Number not an integer. 0\\.5", [])
|
||||
|
||||
- cd: r.range().count()
|
||||
ot: err("ReqlQueryLogicError", "Cannot use an infinite stream with an aggregation function (`reduce`, `count`, etc.) or coerce it to an array.", [])
|
||||
|
||||
- cd: r.range().coerce_to("ARRAY")
|
||||
ot: err("ReqlQueryLogicError", "Cannot use an infinite stream with an aggregation function (`reduce`, `count`, etc.) or coerce it to an array.", [])
|
||||
|
||||
- cd: r.range().coerce_to("OBJECT")
|
||||
ot: err("ReqlQueryLogicError", "Cannot use an infinite stream with an aggregation function (`reduce`, `count`, etc.) or coerce it to an array.", [])
|
||||
|
||||
- cd: r.range(4).count()
|
||||
ot: 4
|
@ -1,20 +0,0 @@
|
||||
desc: 1001 (null + between + sindexes)
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
- cd: tbl.insert({'a':null})
|
||||
rb: tbl.insert({:a => null})
|
||||
- cd: tbl.index_create('a')
|
||||
- cd: tbl.index_create('b')
|
||||
- cd: tbl.index_wait().pluck('index', 'ready')
|
||||
|
||||
- cd: tbl.between(r.minval, r.maxval).count()
|
||||
ot: 1
|
||||
- py: tbl.between(r.minval, r.maxval, index='a').count()
|
||||
js: tbl.between(r.minval, r.maxval, {index:'a'}).count()
|
||||
rb: tbl.between(r.minval, r.maxval, :index => 'a').count()
|
||||
ot: 0
|
||||
- py: tbl.between(r.minval, r.maxval, index='b').count()
|
||||
js: tbl.between(r.minval, r.maxval, {index:'b'}).count()
|
||||
rb: tbl.between(r.minval, r.maxval, :index => 'b').count()
|
||||
ot: 0
|
||||
|
@ -1,19 +0,0 @@
desc: Regression test for issue #1005.
tests:
- py: r.expr(str(r.table_list()))
ot: "r.table_list()"

- py: r.expr(str(r.table_create('a')))
ot: "r.table_create('a')"

- py: r.expr(str(r.table_drop('a')))
ot: "r.table_drop('a')"

- py: r.expr(str(r.db('a').table_list()))
ot: "r.db('a').table_list()"

- py: r.expr(str(r.db('a').table_create('a')))
ot: "r.db('a').table_create('a')"

- py: r.expr(str(r.db('a').table_drop('a')))
ot: "r.db('a').table_drop('a')"
@ -1,65 +0,0 @@
|
||||
desc: Tests key sorting of all usable types in primary indexes
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
|
||||
|
||||
# Test key sorting
|
||||
- def:
|
||||
py: binary_a = r.binary(b'')
|
||||
rb: binary_a = r.binary('')
|
||||
js: binary_a = Buffer('')
|
||||
|
||||
- def:
|
||||
py: binary_b = r.binary(b'5aurhbviunr')
|
||||
rb: binary_b = r.binary('5aurhbviunr')
|
||||
js: binary_b = Buffer('5aurhbviunr')
|
||||
|
||||
# Define a set of rows in order of increasing sindex keys
|
||||
- def:
|
||||
cd: trows = [{'num':0,'id':[0]},
|
||||
{'num':1,'id':[1, 2, 3, 4, 0]},
|
||||
{'num':2,'id':[1, 2, 3, 4, 4]},
|
||||
{'num':3,'id':[1, 2, 3, 4, 4, 5]},
|
||||
{'num':4,'id':[1, 2, 3, 4, 8, 1]},
|
||||
{'num':5,'id':[1, 3, r.epoch_time(0)]},
|
||||
{'num':6,'id':[1, 3, r.epoch_time(0), r.epoch_time(0)]},
|
||||
{'num':7,'id':[1, 3, r.epoch_time(0), r.epoch_time(1)]},
|
||||
{'num':8,'id':[1, 4, 3, 4, 8, 2]},
|
||||
{'num':9,'id':False},
|
||||
{'num':10,'id':True},
|
||||
{'num':11,'id':-500},
|
||||
{'num':12,'id':500},
|
||||
{'num':13,'id':binary_a},
|
||||
{'num':14,'id':binary_b},
|
||||
{'num':15,'id':r.epoch_time(0)},
|
||||
{'num':16,'id':''},
|
||||
{'num':17,'id':' str'}]
|
||||
|
||||
- def:
|
||||
cd: expected = r.range(tbl.count()).coerce_to('array')
|
||||
|
||||
- cd: tbl.insert(trows)['inserted']
|
||||
js: tbl.insert(trows)('inserted')
|
||||
ot: 18
|
||||
|
||||
- rb: tbl.order_by({:index => 'id'}).map{|row| row['num']}.coerce_to('array').eq(expected)
|
||||
js: tbl.order_by({index:'id'}).map(r.row('num')).coerce_to('array').eq(expected)
|
||||
py: tbl.order_by(index='id').map(r.row['num']).coerce_to('array').eq(expected)
|
||||
ot: true
|
||||
|
||||
# Test minval and maxval
|
||||
- rb: tbl.order_by(:index => 'id').between(r.minval, r.maxval).map{|x| x['num']}.coerce_to('array').eq(expected)
|
||||
js: tbl.order_by({index:'id'}).between(r.minval, r.maxval).map(r.row('num')).coerce_to('array').eq(expected)
|
||||
py: tbl.order_by(index='id').between(r.minval, r.maxval).map(r.row['num']).coerce_to('array').eq(expected)
|
||||
ot: true
|
||||
|
||||
- py: tbl.order_by(index='id').between([1,2,3,4,4],[1,2,3,5]).map(r.row['num']).coerce_to('array')
|
||||
js: tbl.order_by({index:'id'}).between([1,2,3,4,4],[1,2,3,5]).map(r.row('num')).coerce_to('array')
|
||||
rb: tbl.order_by(:index => 'id').between([1,2,3,4,4],[1,2,3,5]).map{|x| x['num']}.coerce_to('array')
|
||||
ot: [2,3,4]
|
||||
|
||||
- py: tbl.order_by(index='id').between([1,2,3,4,4,r.minval],[1,2,3,4,4,r.maxval]).map(r.row['num']).coerce_to('array')
|
||||
js: tbl.order_by({index:'id'}).between([1,2,3,4,4,r.minval],[1,2,3,4,4,r.maxval]).map(r.row('num')).coerce_to('array')
|
||||
rb: tbl.order_by(:index => 'id').between([1,2,3,4,4,r.minval],[1,2,3,4,4,r.maxval]).map{|x| x['num']}.coerce_to('array')
|
||||
ot: [3]
|
||||
|
@ -1,39 +0,0 @@
|
||||
desc: 1081 union two streams
|
||||
tests:
|
||||
|
||||
- rb: r.db('test').table_create('t1081')
|
||||
def: t = r.db('test').table('t1081')
|
||||
|
||||
- rb: t.insert([{'id':0}, {'id':1}])
|
||||
|
||||
- rb: r([]).union([]).typeof
|
||||
ot: ("ARRAY")
|
||||
- rb: t.union(t).typeof
|
||||
ot: ("STREAM")
|
||||
- rb: t.union([]).typeof
|
||||
ot: ("STREAM")
|
||||
|
||||
- rb: r.db('test').table_drop('t1081')
|
||||
|
||||
- rb: r.table_create('1081')
|
||||
ot: partial({'tables_created':1})
|
||||
|
||||
- rb: r.table('1081').insert({:password => 0})[:inserted]
|
||||
ot: 1
|
||||
|
||||
- rb: r.table('1081').index_create('password')
|
||||
ot: ({'created':1})
|
||||
- rb: r.table('1081').index_wait('password').pluck('index', 'ready')
|
||||
ot: ([{'ready':True, 'index':'password'}])
|
||||
|
||||
- rb: r.table('1081').get_all(0, :index => 'password').typeof
|
||||
ot: ("SELECTION<STREAM>")
|
||||
- rb: r.table('1081').get_all(0, :index => 'password').without('id').typeof
|
||||
ot: ("STREAM")
|
||||
- rb: r.table('1081').get_all(0, 0, :index => 'password').typeof
|
||||
ot: ("SELECTION<STREAM>")
|
||||
- rb: r.table('1081').get_all(0, 0, :index => 'password').without('id').typeof
|
||||
ot: ("STREAM")
|
||||
|
||||
- rb: r.table_drop('1081')
|
||||
ot: partial({'tables_dropped':1})
|
@ -1,4 +0,0 @@
desc: 1132 JSON duplicate key
tests:
- cd: r.json('{"a":1,"a":2}')
ot: err("ReqlQueryLogicError", "Duplicate key \"a\" in JSON.", [])
@ -1,19 +0,0 @@
desc: Regression tests for issue #1133, which concerns circular references in the drivers.

tests:
- def: a = {}
- def: b = {'a':a}
- def: a['b'] = b

- cd: r.expr(a)
ot:
cd: err('ReqlDriverCompileError', 'Nesting depth limit exceeded.', [])
rb: err('ReqlDriverCompileError', 'Maximum expression depth exceeded (you can override this with `r.expr(X, MAX_DEPTH)`).', [])

- cd: r.expr({'a':{'a':{'a':{'a':{'a':{'a':{'a':{}}}}}}}}, 7)
ot:
cd: err('ReqlDriverCompileError', 'Nesting depth limit exceeded.', [])
rb: err('ReqlDriverCompileError', 'Maximum expression depth exceeded (you can override this with `r.expr(X, MAX_DEPTH)`).', [])

- cd: r.expr({'a':{'a':{'a':{'a':{'a':{'a':{'a':{}}}}}}}}, 10)
ot: ({'a':{'a':{'a':{'a':{'a':{'a':{'a':{}}}}}}}})
@ -1,5 +0,0 @@
desc: 1155 -- Empty batched_replaces_t constructed
table_variable_name: tbl
tests:
- rb: tbl.insert([{:id => '2'}, {:id => '4'}])['inserted']
ot: 2
@ -1,26 +0,0 @@
|
||||
desc: 1179 -- BRACKET term
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
- js: r.expr([1])(r.expr(0))
|
||||
py: r.expr([1])[r.expr(0)]
|
||||
rb: r.expr([1])[r.expr(0)]
|
||||
ot: 1
|
||||
- js: r.expr({"foo":1})('foo')
|
||||
ot: 1
|
||||
- js: r.expr([1])(0)
|
||||
ot: 1
|
||||
- js: tbl.insert([{'id':42},{'id':4},{'id':89},{'id':6},{'id':43}]).pluck('inserted','first_error')
|
||||
ot: ({'inserted':5})
|
||||
|
||||
# test [] grouped data semantics
|
||||
- js: tbl.group('id')(0)
|
||||
ot: ([{"group":4,"reduction":{"id":4}},{"group":6,"reduction":{"id":6}},{"group":42,"reduction":{"id":42}},{"group":43,"reduction":{"id":43}},{"group":89,"reduction":{"id":89}}] )
|
||||
- js: tbl.coerce_to('array').group('id')(0)
|
||||
ot: ([{"group":4,"reduction":{"id":4}},{"group":6,"reduction":{"id":6}},{"group":42,"reduction":{"id":42}},{"group":43,"reduction":{"id":43}},{"group":89,"reduction":{"id":89}}] )
|
||||
|
||||
# test nth grouped data semantics
|
||||
- js: tbl.group('id').nth(0)
|
||||
ot: ([{"group":4,"reduction":{"id":4}},{"group":6,"reduction":{"id":6}},{"group":42,"reduction":{"id":42}},{"group":43,"reduction":{"id":43}},{"group":89,"reduction":{"id":89}}] )
|
||||
- js: tbl.coerce_to('array').group('id').nth(0)
|
||||
ot: ([{"group":4,"reduction":{"id":4}},{"group":6,"reduction":{"id":6}},{"group":42,"reduction":{"id":42}},{"group":43,"reduction":{"id":43}},{"group":89,"reduction":{"id":89}}] )
|
||||
|
@ -1,7 +0,0 @@
desc: 1468 -- Empty batched_replaces_t constructed
table_variable_name: tbl
tests:
- rb: tbl.insert([{}, {}, {}])['inserted']
ot: (3)
- rb: tbl.replace(non_atomic:'true'){|row| r.js("{}")}
ot: ({"unchanged"=>0,"skipped"=>0,"replaced"=>0,"inserted"=>0,"first_error"=>"Cannot convert javascript `undefined` to ql::datum_t.","errors"=>3,"deleted"=>0})
@ -1,22 +0,0 @@
|
||||
desc: 1789 -- deleting a secondary index on a table that contains non-inline stored documents corrupts db
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
- rb: tbl.insert({:foo => 'a', :data => "AAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"}).pluck('inserted')
|
||||
ot: ({'inserted':1})
|
||||
|
||||
- rb: tbl.index_create('foo')
|
||||
ot: ({'created':1})
|
||||
|
||||
- rb: tbl.index_wait('foo').pluck('index', 'ready')
|
||||
ot: ([{'index':'foo', 'ready':true}])
|
||||
|
||||
- rb: tbl.index_drop('foo')
|
||||
ot: ({'dropped':1})
|
||||
|
||||
- rb: tbl.coerce_to('ARRAY').count()
|
||||
ot: (1)
|
||||
|
@ -1,10 +0,0 @@
desc: 2052 -- Verify that the server rejects bogus global options.
tests:
- cd: r.expr(1)
runopts:
array_limit: 16
ot: 1
- cd: r.expr(1)
runopts:
obviously_bogus: 16
ot: err("ReqlCompileError", "Unrecognized global optional argument `obviously_bogus`.", [])
@ -1,45 +0,0 @@
|
||||
desc: 2399 literal terms not removed under certain circumstances
|
||||
table_variable_name: t
|
||||
tests:
|
||||
- rb: t.insert({})
|
||||
- rb: t.update({:a => {:b => r.literal({})}})
|
||||
- rb: t.without('id').coerce_to("ARRAY")
|
||||
ot: [{'a':{'b':{}}}]
|
||||
- rb: t.delete()
|
||||
|
||||
- rb: t.insert({})
|
||||
- rb: t.update({:a => {:b => r.literal()}})
|
||||
- rb: t.without('id').coerce_to("ARRAY")
|
||||
ot: [{'a': {}}]
|
||||
- rb: t.delete()
|
||||
|
||||
- rb: t.insert({})
|
||||
- rb: t.update({:a => {:b => {:c => {:d => r.literal({})}}}})
|
||||
- rb: t.without('id').coerce_to("ARRAY")
|
||||
ot: [{'a':{'b':{'c':{'d':{}}}}}]
|
||||
- rb: t.delete()
|
||||
|
||||
- rb: t.insert({})
|
||||
- rb: t.update({:a => {:b => [[[{:c => r.literal({})}]]]}})
|
||||
- rb: t.without('id').coerce_to("ARRAY")
|
||||
ot: [{'a':{'b':[[[{'c':{}}]]]}}]
|
||||
- rb: t.delete()
|
||||
|
||||
- rb: t.insert({})
|
||||
- rb: t.update({:a => {:b => [r.literal()]}})
|
||||
- rb: t.without('id').coerce_to("ARRAY")
|
||||
ot: [{'a':{'b':[]}}]
|
||||
- rb: t.delete()
|
||||
|
||||
- rb: t.insert({})
|
||||
- rb: t.update({:a => {:b => {:a => 'A', :b => 'B', :c => 'C', :cc => r.literal(), :d => 'D'}}})
|
||||
- rb: t.without('id').coerce_to("ARRAY")
|
||||
ot: [{'a':{'b':{'a':'A', 'b':'B', 'c':'C', 'd':'D'}}}]
|
||||
- rb: t.delete()
|
||||
|
||||
- rb: t.insert({})
|
||||
- rb: t.update({:a => {:b => {:a => 'A', :b => 'B', :c => 'C', :cc => r.literal('CC'), :d => 'D'}}})
|
||||
- rb: t.without('id').coerce_to("ARRAY")
|
||||
ot: [{'a':{'b':{'a':'A', 'b':'B', 'c':'C', 'cc':'CC', 'd':'D'}}}]
|
||||
- rb: t.delete()
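All of these regressions exercise the same contract: inside update, a nested plain object is merged field by field, r.literal(value) replaces the nested value outright, and a bare r.literal() removes the field, even when buried in deeper objects or arrays. A hedged Python sketch of that behaviour outside the test harness, with hypothetical connection and table names:

import rethinkdb as r

conn = r.connect('localhost', 28015)  # hypothetical host/port

r.table('t').insert({'id': 1, 'a': {'b': {'old': True}}}).run(conn)

# Plain nested objects merge field by field...
r.table('t').get(1).update({'a': {'b': {'new': True}}}).run(conn)

# ...r.literal(value) replaces the nested object wholesale...
r.table('t').get(1).update({'a': {'b': r.literal({'new': True})}}).run(conn)

# ...and a bare r.literal() deletes the field entirely.
r.table('t').get(1).update({'a': {'b': r.literal()}}).run(conn)

print(r.table('t').get(1).run(conn))  # expected: {'id': 1, 'a': {}}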
|
||||
|
@ -1,8 +0,0 @@
|
||||
desc: 2639 -- Coroutine stacks should not overflow during the query compilation phase.
|
||||
tests:
|
||||
- rb: r.expr({id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:{id:1}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}, 1000)
|
||||
ot: partial({})
|
||||
|
||||
- rb: r.expr([[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]],1000).and(nil)
|
||||
ot: nil
|
||||
|
@ -1,6 +0,0 @@
desc: Regression test for issue 2696, delete_at with end bounds.
tests:
- cd: r.expr([1,2,3,4]).delete_at(4,4)
ot: [1,2,3,4]
- cd: r.expr([]).delete_at(0,0)
ot: []
@ -1,31 +0,0 @@
|
||||
desc: 2697 -- Array insert and splice operations don't check array size limit.
|
||||
table_variable_name: tbl
|
||||
tests:
|
||||
# make enormous > 100,000 element array
|
||||
- def: ten_l = r.expr([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
|
||||
- js: tbl.insert({'id':1, 'a':r.expr(ten_l).concatMap(function(l) { return ten_l }).concatMap(function(l) { return ten_l }).concatMap(function(l) { return ten_l }).concatMap(function(l) { return ten_l })}).pluck('first_error', 'inserted')
|
||||
py: tbl.insert({'id':1, 'a':r.expr(ten_l).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11)))}).pluck('first_error', 'inserted')
|
||||
rb: tbl.insert({'id':1, 'a':r.expr(ten_l).concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}}).pluck('first_error', 'inserted')
|
||||
ot: ({'inserted':1})
|
||||
- cd: tbl.get(1).replace({'id':1, 'a':r.row['a'].splice_at(0, [2])}).pluck('first_error')
|
||||
js: tbl.get(1).replace({'id':1, 'a':r.row('a').spliceAt(0, [2])}).pluck('first_error')
|
||||
rb: tbl.get(1).replace{|old| {:id => 1, :a => old['a'].splice_at(0, [2])}}.pluck('first_error')
|
||||
ot: ({'first_error':'Array over size limit `100000`.'})
|
||||
- cd: tbl.get(1)['a'].count()
|
||||
js: tbl.get(1)('a').count()
|
||||
ot: 100000
|
||||
- cd: tbl.get(1).replace({'id':1, 'a':r.row['a'].insert_at(0, [2])}).pluck('first_error')
|
||||
js: tbl.get(1).replace({'id':1, 'a':r.row('a').insertAt(0, [2])}).pluck('first_error')
|
||||
rb: tbl.get(1).replace{|old| {:id => 1, :a => old['a'].insert_at(0, [2])}}.pluck('first_error')
|
||||
ot: ({'first_error':'Array over size limit `100000`.'})
|
||||
- cd: tbl.get(1)['a'].count()
|
||||
js: tbl.get(1)('a').count()
|
||||
ot: 100000
|
||||
- js: r.expr([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).spliceAt(0, [1]).count()
|
||||
py: r.expr(ten_l).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).splice_at(0, [1]).count()
|
||||
rb: r.expr(ten_l).concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}.splice_at(0, [1]).count()
|
||||
ot: err("ReqlResourceLimitError", "Array over size limit `100000`.", [])
|
||||
- js: r.expr([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).concatMap(function(l) { return [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] }).insertAt(0, [1]).count()
|
||||
py: r.expr(ten_l).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).concat_map(lambda l:list(range(1,11))).insert_at(0, [1]).count()
|
||||
rb: r.expr(ten_l).concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}.concat_map {|l| ten_l}.insert_at(0, [1]).count()
|
||||
ot: err("ReqlResourceLimitError", "Array over size limit `100000`.", [])
|
@ -1,21 +0,0 @@
desc: 2709 -- Guarantee failed with [max_els >= min_els]
table_variable_name: tbl
tests:
- py: tbl.insert([{'result':i} for i in range(1,1000)]).pluck('first_error', 'inserted')
runopts:
min_batch_rows: 10
max_batch_rows: 13
ot: ({'inserted':999})

- py: tbl.map(lambda thing:'key').count()
runopts:
min_batch_rows: 10
max_batch_rows: 13
ot: (999)

- py: tbl.map(lambda thing:'key').count()
runopts:
min_batch_rows: 10
max_batch_rows: 13
ot: (999)

@ -1,6 +0,0 @@
desc: Test pseudo literal strings in JSON.
tests:
- js: r.expr({"a":{"b":1, "c":2}}).merge(r.json('{"a":{"$reql_'+'type$":"LITERAL", "value":{"b":2}}}'))
py: r.expr({"a":{"b":1, "c":2}}).merge(r.json('{"a":{"$reql_type$":"LITERAL", "value":{"b":2}}}'))
rb: r.expr({:a => {:b => 1, :c => 2}}).merge(r.json('{"a":{"$reql_type$":"LITERAL", "value":{"b":2}}}'))
ot: ({'a':{'b':2}})

@ -1,25 +0,0 @@
desc: Stop people treating ptypes as objects
tests:
- cd: r.now()['epoch_time']
js: r.now()('epoch_time')
ot: err("ReqlQueryLogicError", "Cannot call `bracket` on objects of type `PTYPE<TIME>`.")
- cd: r.now().get_field('epoch_time')
ot: err("ReqlQueryLogicError", "Cannot call `get_field` on objects of type `PTYPE<TIME>`.")
- cd: r.now().keys()
ot: err("ReqlQueryLogicError", "Cannot call `keys` on objects of type `PTYPE<TIME>`.")
- cd: r.now().pluck('epoch_time')
ot: err("ReqlQueryLogicError", "Cannot call `pluck` on objects of type `PTYPE<TIME>`.")
- cd: r.now().without('epoch_time')
ot: err("ReqlQueryLogicError", "Cannot call `without` on objects of type `PTYPE<TIME>`.")
- cd: r.now().merge({"foo":4})
rb: r.now().merge({"foo"=>4})
ot: err("ReqlQueryLogicError", "Cannot call `merge` on objects of type `PTYPE<TIME>`.")
- cd: r.expr({"foo":4}).merge(r.now())
rb: r.expr({"foo"=>4}).merge(r.now())
ot: err("ReqlQueryLogicError", "Cannot merge objects of type `PTYPE<TIME>`.")
- cd: r.now().has_fields('epoch_time')
ot: err("ReqlQueryLogicError", "Cannot call `has_fields` on objects of type `PTYPE<TIME>`.")
- cd: r.object().has_fields(r.time(2014, 7, 7, 'Z'))
ot: err("ReqlQueryLogicError", "Invalid path argument `1404691200`.")
- cd: r.expr(1).keys()
ot: err("ReqlQueryLogicError", "Cannot call `keys` on objects of type `NUMBER`.")

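The same restriction can be seen from the Python driver: pseudotype values such as TIME reject generic object operations and have to be unwrapped through their dedicated accessors. A minimal sketch, assuming the rethinkdb pip package and a local server on localhost:28015:

    from rethinkdb import RethinkDB
    from rethinkdb.errors import ReqlQueryLogicError

    r = RethinkDB()
    conn = r.connect("localhost", 28015)

    try:
        r.now()["epoch_time"].run(conn)        # generic field access is rejected
    except ReqlQueryLogicError as err:
        print(err)                             # ... on objects of type `PTYPE<TIME>` ...

    print(r.now().to_epoch_time().run(conn))   # the dedicated accessor works
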
@ -1,20 +0,0 @@
desc: 2767 -- Evaluate secondary index function with pristine env.
table_variable_name: tbl
tests:
- py: tbl.index_create('foo', lambda x:(x['a']+[1,2,3,4,5]+[6,7,8,9,10]).count())
runopts:
array_limit: 6
ot: {'created':1}
- py: tbl.index_wait()
- py: tbl.insert({'id':1,'a':[1,2,3,4,5]})
runopts:
array_limit: 6
ot: {'deleted':0,'replaced':0,'unchanged':0,'errors':0,'skipped':0,'inserted':1}
- py: tbl.coerce_to('array')
ot: [{'id':1,'a':[1,2,3,4,5]}]
- py: tbl.get_all(15, index='foo').coerce_to('array')
ot: [{'id':1,'a':[1,2,3,4,5]}]
- py: tbl.get_all(15, index='foo').coerce_to('array')
runopts:
array_limit: 6
ot: [{'id':1,'a':[1,2,3,4,5]}]

@ -1,99 +0,0 @@
desc: Tests key sorting of all usable types in secondary indexes
table_variable_name: tbl
tests:

# Test key sorting
- def:
py: binary_a = r.binary(b'')
rb: binary_a = r.binary('')
js: binary_a = Buffer('')

- def:
py: binary_trunc1 = r.binary(b'123456789012345678901234567890123456789012345678901234567890' +
b'123456789012345678901234567890123456789012345678901234567890')
rb: binary_trunc1 = r.binary('123456789012345678901234567890123456789012345678901234567890' +
'123456789012345678901234567890123456789012345678901234567890')
js: binary_trunc1 = Buffer('123456789012345678901234567890123456789012345678901234567890' +
'123456789012345678901234567890123456789012345678901234567890')

- def:
py: binary_trunc2 = r.binary(b'123456789012345678901234567890123456789012345678901234567890' +
b'123456789012345678901234567890123456789012345678901234567891')
rb: binary_trunc2 = r.binary('123456789012345678901234567890123456789012345678901234567890' +
'123456789012345678901234567890123456789012345678901234567891')
js: binary_trunc2 = Buffer('123456789012345678901234567890123456789012345678901234567890' +
'123456789012345678901234567890123456789012345678901234567891')

- def:
py: binary_b = r.binary(b'5aurhbviunr')
rb: binary_b = r.binary('5aurhbviunr')
js: binary_b = Buffer('5aurhbviunr')

- def: str_trunc1 = '123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'
- def: str_trunc2 = '123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567891'

# Define a set of rows in order of increasing sindex keys
- def: trows = [ {'id':0,'idx':[0]},
{'id':1,'idx':[1, 2, 3, 4, 5, 6, 7, 0]},
{'id':2,'idx':[1, 2, 3, 4, 5, 6, 7, 4]},
{'id':3,'idx':[1, 2, 3, 4, 5, 6, 7, 4, 5]},
{'id':4,'idx':[1, 2, 3, 4, 5, 6, 7, 8, 1]},
{'id':5,'idx':[1, 3, binary_trunc1]},
{'id':6,'idx':[1, 3, binary_trunc2]},
{'id':7,'idx':[1, 3, r.epoch_time(0), r.epoch_time(0), r.epoch_time(0)]},
{'id':8,'idx':[1, 3, r.epoch_time(0), r.epoch_time(0), r.epoch_time(0), r.epoch_time(0)]},
{'id':9,'idx':[1, 3, r.epoch_time(0), r.epoch_time(0), r.epoch_time(0), r.epoch_time(1)]},
{'id':10,'idx':[1, 3, str_trunc1, str_trunc1]},
{'id':11,'idx':[1, 3, str_trunc1, str_trunc2]},
{'id':12,'idx':[1, 4, 3, 4, 5, 6, 7, 8, 2]},
{'id':13,'idx':[binary_trunc1]},
{'id':14,'idx':[binary_trunc2]},
{'id':15,'idx':False},
{'id':16,'idx':True},
{'id':17,'idx':-500},
{'id':18,'idx':500},
{'id':19,'idx':binary_a},
{'id':20,'idx':binary_trunc1},
{'id':21,'idx':binary_trunc2},
{'id':22,'idx':binary_b},
{'id':23,'idx':r.epoch_time(0)},
{'id':24,'idx':''},
{'id':25,'idx':' str'},
{'id':26,'idx':str_trunc1},
{'id':27,'idx':str_trunc2}]

- def:
cd: expected = r.range(tbl.count()).coerce_to('array')

- cd: tbl.insert(trows)['inserted']
js: tbl.insert(trows)('inserted')
ot: 28

- cd: tbl.index_create('idx')
ot: ({'created':1})

- cd: tbl.index_wait('idx').pluck('index', 'ready')
ot: [{'index':'idx','ready':true}]

- rb: tbl.order_by({:index => 'idx'}).map{|row| row['id']}.coerce_to('array').eq(expected)
js: tbl.order_by({index:'idx'}).map(r.row('id')).coerce_to('array').eq(expected)
py: tbl.order_by(index='idx').map(r.row['id']).coerce_to('array').eq(expected)
ot: true

# Test minval and maxval
- rb: tbl.order_by(:index => 'idx').between(r.minval, r.maxval).map{|x| x['id']}.coerce_to('array').eq(expected)
js: tbl.order_by({index:'idx'}).between(r.minval, r.maxval).map(r.row('id')).coerce_to('array').eq(expected)
py: tbl.order_by(index='idx').between(r.minval, r.maxval).map(r.row['id']).coerce_to('array').eq(expected)
ot: true

- py: tbl.order_by(index='idx').between([1,2,3,4,5,6,7,4],[1,2,3,4,5,6,8]).map(r.row['id']).coerce_to('array')
js: tbl.order_by({index:'idx'}).between([1,2,3,4,5,6,7,4],[1,2,3,4,5,6,8]).map(r.row('id')).coerce_to('array')
rb: tbl.order_by(:index => 'idx').between([1,2,3,4,5,6,7,4],[1,2,3,4,5,6,8]).map{|x| x['id']}.coerce_to('array')
ot: [2,3,4]

- py: tbl.order_by(index='idx').between([1,2,3,4,5,6,7,4,r.minval],[1,2,3,4,5,6,7,4,r.maxval]).map(r.row['id']).coerce_to('array')
js: tbl.order_by({index:'idx'}).between([1,2,3,4,5,6,7,4,r.minval],[1,2,3,4,5,6,7,4,r.maxval]).map(r.row('id')).coerce_to('array')
rb: tbl.order_by(:index => 'idx').between([1,2,3,4,5,6,7,4,r.minval],[1,2,3,4,5,6,7,4,r.maxval]).map{|x| x['id']}.coerce_to('array')
ot: [3]

@ -1,16 +0,0 @@
desc: Test that return_changes fails gracefully.
table_variable_name: tbl
tests:
- py: tbl.insert([{'result':i} for i in range(1,100)]).pluck('first_error', 'inserted')
ot: {'inserted':99}

- py: tbl.update({'foo':'bar'}, return_changes=True)['changes'].count()
runopts:
array_limit: 40
ot: 40

- py: tbl.update({'foo':'quux'}, return_changes=True)['warnings']
runopts:
array_limit: 40
ot: ['Too many changes, array truncated to 40.']

@ -1,17 +0,0 @@
desc: Avoid misleading array limit error message
table_variable_name: tbl
tests:
- py: tbl.insert([{'id':i,'mod':i%5,'foo':5} for i in range(1,1000)]).pluck('first_error', 'inserted')
ot: ({'inserted':999})
- py: tbl.coerce_to('array')
runopts:
array_limit: 500
ot: err("ReqlResourceLimitError", "Array over size limit `500`.", [0])
- py: tbl.group('mod').coerce_to('array')
runopts:
array_limit: 500
ot: err("ReqlResourceLimitError", "Grouped data over size limit `500`. Try putting a reduction (like `.reduce` or `.count`) on the end.", [0])
- py: tbl.group('foo').coerce_to('array')
runopts:
array_limit: 500
ot: err("ReqlResourceLimitError", "Grouped data over size limit `500`. Try putting a reduction (like `.reduce` or `.count`) on the end.", [0])

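array_limit is an ordinary per-query run option in the drivers, so the two error messages above are easy to reproduce. A minimal sketch with the Python driver (illustrative only; assumes a local server on localhost:28015):

    from rethinkdb import RethinkDB
    from rethinkdb.errors import ReqlResourceLimitError

    r = RethinkDB()
    conn = r.connect("localhost", 28015)

    try:
        # Building a 1000-element array under a 500-element limit fails with the
        # plain "Array over size limit" error; grouped data instead gets the hint
        # about ending the query with a reduction.
        r.range(1000).coerce_to("array").run(conn, array_limit=500)
    except ReqlResourceLimitError as err:
        print(err)
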
@ -1,10 +0,0 @@
desc: Test empty polygon special cases
tests:
- cd: r.polygon([0,0], [0,10], [10, 10], [10, 0]).polygon_sub(r.polygon([0,0], [0,10], [10, 10], [10, 0])).intersects(r.point(0,0))
ot: (false)
- cd: r.polygon([0,0], [0,10], [10, 10], [10, 0]).polygon_sub(r.polygon([0,0], [0,10], [10, 10], [10, 0])).intersects(r.polygon([0,0], [0,10], [10, 10], [10, 0]))
ot: (false)
- cd: r.polygon([0,0], [0,10], [10, 10], [10, 0]).polygon_sub(r.polygon([0,0], [0,10], [10, 10], [10, 0])).intersects(r.line([0,0], [0,10]))
ot: (false)
- cd: r.polygon([0,0], [0,10], [10, 10], [10, 0]).intersects(r.polygon([0,0], [0,10], [10, 10], [10, 0]).polygon_sub(r.polygon([0,0], [0,10], [10, 10], [10, 0])))
ot: (false)

@ -1,7 +0,0 @@
desc: Use pseudotype name properly in `info`
tests:
- cd: r.point(0, 1).type_of()
ot: ("PTYPE<GEOMETRY>")
- cd: r.point(0, 1).info()['type']
js: r.point(0, 1).info()('type')
ot: ("PTYPE<GEOMETRY>")

@ -1,15 +0,0 @@
desc: Regression tests for issue #309, using 'union' on an array and a stream doesn't seem to work
table_variable_name: t
tests:

# Set up a stream

- cd: t.insert([{'id':0}, {'id':1}])

# Try to union to an array
- cd: t.union([2,3,4])
ot: bag([{'id':0}, {'id':1}, 2, 3, 4])

- cd: r.expr([2,3,4]).union(t)
ot: bag([{'id':0}, {'id':1}, 2, 3, 4])

@ -1,38 +0,0 @@
desc: Test truncated secondary key ordering under variably sized primary keys
table_variable_name: tbl
tests:

- rb: tbl.index_create("3444_A") {|rec| rec['A']}
ot: ({'created':1})
- rb: tbl.index_create("3444_Z") {|rec| rec['Z']}
ot: ({'created':1})
- rb: tbl.index_wait("3444_A", "3444_Z").pluck('index', 'ready')
ot: (bag([{'ready':True, 'index':'3444_A'}, {'ready':True, 'index':'3444_Z'}]))

# Insert two documents with very long keys A*1 and A*0 / Z*0 and Z*1
# Note that the primary keys "a" and "aa" have different lengths, so
# the secondary index key will be truncated at different places.

# We insert a key A*[01] and a key Z*[01] because the former sorts before the appended
# primary key (Sa / Saa), and the latter sorts after it, which are two distinct cases
# that we have to test here.

- rb: tbl.insert({id:'a', A:'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1', Z:'ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ0'})['inserted']
ot: (1)
- rb: tbl.insert({id:'aa', A:'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA0', Z:'ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ1'})['inserted']
ot: (1)

- rb: tbl.order_by(:index => r.asc('3444_A')).limit(1)['id'].coerce_to('ARRAY')
ot: (['aa'])
- rb: tbl.order_by(:index => r.desc('3444_A')).limit(1)['id'].coerce_to('ARRAY')
ot: (['a'])

- rb: tbl.order_by(:index => r.asc('3444_Z')).limit(1)['id'].coerce_to('ARRAY')
ot: (['a'])
- rb: tbl.order_by(:index => r.desc('3444_Z')).limit(1)['id'].coerce_to('ARRAY')
ot: (['aa'])

- rb: tbl.index_drop("3444_A")
ot: ({'dropped':1})
- rb: tbl.index_drop("3444_Z")
ot: ({'dropped':1})

@ -1,21 +0,0 @@
desc: 3449 -- test openness and closedness of range limits under descending ordering
table_variable_name: tbl
tests:
- js: tbl.insert([{id: 0}, {id: 1}, {id: 2}, {id: 3}])
ot: {'skipped':0, 'deleted':0, 'unchanged':0, 'errors':0, 'replaced':0, 'inserted':4}

# Test ascending ordering as well for completeness
- js: tbl.between(1, 3).orderBy({index: r.asc('id')})
ot: [{id:1}, {id:2}]
- js: tbl.between(1, 3).orderBy({index: r.desc('id')})
ot: [{id:2}, {id:1}]

- js: tbl.between(1, 3, {left_bound: 'open'}).orderBy({index: r.asc('id')})
ot: [{id:2}]
- js: tbl.between(1, 3, {left_bound: 'open'}).orderBy({index: r.desc('id')})
ot: [{id:2}]

- js: tbl.between(1, 3, {left_bound: 'open', right_bound: 'closed'}).orderBy({index: r.asc('id')})
ot: [{id:2}, {id:3}]
- js: tbl.between(1, 3, {left_bound: 'open', right_bound: 'closed'}).orderBy({index: r.desc('id')})
ot: [{id:3}, {id:2}]

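The bound handling exercised above maps directly onto the drivers' between() options. A rough stand-alone version in Python (a sketch only; the table name t3449 and its fixture rows are hypothetical stand-ins for the generated test table):

    from rethinkdb import RethinkDB

    r = RethinkDB()
    conn = r.connect("localhost", 28015)

    if "t3449" not in r.table_list().run(conn):
        r.table_create("t3449").run(conn)
        r.table("t3449").insert([{"id": i} for i in range(4)]).run(conn)
    tbl = r.table("t3449")

    # left_bound defaults to 'closed' and right_bound to 'open'; a descending
    # order_by must only reverse the order, never change which rows qualify.
    sel = tbl.between(1, 3, left_bound="open", right_bound="closed")
    print(list(sel.order_by(index=r.asc("id")).run(conn)))   # [{'id': 2}, {'id': 3}]
    print(list(sel.order_by(index=r.desc("id")).run(conn)))  # [{'id': 3}, {'id': 2}]
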
@ -1,20 +0,0 @@
desc: Regression tests for issue #354, Skip and limit should throw an error
tests:

- def: arr = r.expr([1,2,3,4,5])

# Correct behavior
- cd: arr.skip(2)
ot: [3,4,5]

- cd: arr.skip('a')
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found STRING.", [1])

- cd: arr.skip([1,2,3])
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found ARRAY.", [1])

- cd: arr.skip({}).count()
ot: err("ReqlQueryLogicError", "Expected type NUMBER but found OBJECT.", [0, 1])

- cd: arr.skip(null)
ot: err("ReqlNonExistenceError", "Expected type NUMBER but found NULL.", [1])

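In driver terms the deleted test amounts to the following Python sketch (illustrative; assumes a local server on localhost:28015):

    from rethinkdb import RethinkDB
    from rethinkdb.errors import ReqlQueryLogicError

    r = RethinkDB()
    conn = r.connect("localhost", 28015)

    arr = r.expr([1, 2, 3, 4, 5])
    print(arr.skip(2).run(conn))      # [3, 4, 5]

    try:
        arr.skip("a").run(conn)       # non-numeric arguments are rejected server-side
    except ReqlQueryLogicError as err:
        print(err)                    # Expected type NUMBER but found STRING.
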
@ -1,51 +0,0 @@
desc: Test that negative zero and positive zero refer to the same row
table_variable_name: tbl
tests:
# In order to send a `-0` from JS we need to provide raw JSON
- cd: tbl.insert([{'id':0.0, 'value':'abc'}, {'id':[1, -0.0], 'value':'def'}])
js: tbl.insert([{'id':0.0, 'value':'abc'}, {'id':[1, r.json('-0.0')], 'value':'def'}])
ot: partial({'inserted':2})

# Test getting the rows by their original and opposite id
- cd: tbl.get(0.0)
ot: {'id':0.0, 'value':'abc'}

- cd: tbl.get(-0.0)
js: tbl.get(r.json('-0.0'))
ot: {'id':0.0, 'value':'abc'}

- cd: tbl.get([1, 0.0])
ot: {'id':[1, -0.0], 'value':'def'}

- cd: tbl.get([1, -0.0])
js: tbl.get([1, r.json('-0.0')])
ot: {'id':[1, -0.0], 'value':'def'}

# Because I don't trust our test framework, test against a JSON string
- cd: tbl.get(0.0).pluck('id').to_json_string()
ot: '{"id":0}'

- cd: tbl.get(-0.0).pluck('id').to_json_string()
js: tbl.get(r.json('-0.0')).pluck('id').to_json_string()
ot: '{"id":0}'

- cd: tbl.get([1, 0.0]).pluck('id').to_json_string()
ot: '{"id":[1,-0.0]}'

- cd: tbl.get([1, -0.0]).pluck('id').to_json_string()
js: tbl.get([1, r.json('-0.0')]).pluck('id').to_json_string()
ot: '{"id":[1,-0.0]}'

# Test inserting a duplicate
- cd:
- tbl.insert({'id':0.0})
- tbl.insert({'id':[1,0.0]})
ot: partial({'errors':1})

- cd:
- tbl.insert({'id':-0.0})
- tbl.insert({'id':[1,-0.0]})
js:
- tbl.insert({'id':r.json('-0.0')})
- tbl.insert({'id':[1,r.json('-0.0')]})
ot: partial({'errors':1})

@ -1,19 +0,0 @@
desc: Regression tests for issue #370, calling `map` after `db_list` or `table_list`
tests:

# Set up a stream
- cd: r.db('test').table_create('t370')
def: d = r.db('test')

# Map after db_list
- cd: r.db_list().map(r.row)
rb: r.db_list.map{|row| row}
ot: (['rethinkdb', 'test'])

# Map after table_list
- cd: d.table_list().map(r.row)
rb: d.table_list.map{|row| row}
ot: (['t370'])

# clean up
- cd: r.db('test').table_drop('t370')

@ -1,17 +0,0 @@
desc: Test that we do not crash on an error during a function called at unsharding
table_variable_name: tbl
tests:
- cd: tbl.insert([ {'id':0, 'a':5}, {'id':1, 'a':6} ])
ot: partial({'inserted':2})

# Test bare reduce
- rb: tbl.reduce{|x,y| r.object('a', r.add(x['a'], y['a']))}
py: tbl.reduce(lambda x,y:r.object('a', r.add(x['a'], y['a'])))
js: tbl.reduce(function(x,y){return r.object('a', r.add(x('a'), y('a')));})
ot: ({'a':11})

# Test reduce with a function that errors
- rb: tbl.reduce{|x,y| r.expr(0)[0]}
py: tbl.reduce(lambda x,y:r.expr(0)[0])
js: tbl.reduce(function(x,y){return r.expr(0)(0);})
ot: err('ReqlQueryLogicError','Cannot convert NUMBER to SEQUENCE')

@ -1,26 +0,0 @@
desc: Test cleanup of cursors on the server
table_variable_name: tbl
tests:
# The JS test driver doesn't support noreply wait, so only test with Python and Ruby

- py: r.db('rethinkdb').table('jobs').map(lambda x:1)
rb: r.db('rethinkdb').table('jobs').map{|x| 1}
ot: [1]

- py: r.db('rethinkdb').table('jobs').map(lambda x:1)
rb: r.db('rethinkdb').table('jobs').map{|x| 1}
runopts:
noreply: true
ot: null

- py: r.db('rethinkdb').table('jobs').map(lambda x:1)
rb: r.db('rethinkdb').table('jobs').map{|x| 1}
runopts:
noreply: true
testopts:
noreply_wait: true
ot: null

- py: r.db('rethinkdb').table('jobs').map(lambda x:1)
rb: r.db('rethinkdb').table('jobs').map{|x| 1}
ot: [1]

@ -1,48 +0,0 @@
# note: this should be converted to a full test when #4030 is completed

desc: test that r.union is a top level function
table_variable_name: tbl
tests:

# == setup

- def: data = [{'id':1}, {'id':2}, {'id':3}, {'id':4}, {'id':5}, {'id':6}]
- def: changes = [{'id':7}, {'id':8}, {'id':9}, {'id':10}]

- cd: tbl.insert(data)
ot: partial({'errors':0, 'inserted':6})

- cd: tbl.count()
ot: (6)

# == tests

# - two streams
- cd: tbl.union(tbl)
ot:
cd: bag(data * 2)
js: bag(data.concat(data))

# - top level object
- cd: r.union(tbl, tbl)
ot:
cd: bag(data * 2)
js: bag(data.concat(data))

# # - two changefeeds
#
# - cd: unionFeed = tbl.changes().union(tbl.changes())['new_val']
# js: unionFeed = tbl.changes().union(tbl.changes())('new_val')
# - cd: tbl.insert(changes)
# - cd: fetch(unionFeed, 4 * 2)
# ot:
# cd: bag(changes * 2)
# js: bag(changes.concat(changes))
#
# # == errors
#
# # - order by
#
# - cd: tbl.union(tbl).changes().order_by('id')
# ot: err('ReqlRuntimeError', "Cannot call a terminal (`reduce`, `count`, etc.) on an infinite stream (such as a changefeed).")
#
@ -1,9 +0,0 @@
desc: Changefeeds on geo intersection filter
table_variable_name: tbl
tests:
- js: changefeed = tbl.filter(function(d){ return d("l").intersects(r.polygon([1,2],[2,2],[2,1],[1,1])) }).changes()

- js: tbl.insert([{"l":r.point(1.5,1.5), "id":1}])
ot: partial({'errors':0, 'inserted':1})
- js: fetch(changefeed, 1)
ot: [{"new_val":{"l":{"$reql_type$":"GEOMETRY","coordinates":[1.5,1.5],"type":"Point"}, "id":1}, "old_val":null}]

@ -1,12 +0,0 @@
desc: Regression tests for issue 4132
table_variable_name: tbl
tests:
- cd: r.and()
py: r.and_()
ot: true
- cd: r.or()
py: r.or_()
ot: false
- cd: r.expr(false).or(nil)
py: r.expr(False).or_(None)
ot: nil

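The zero-argument forms can be tried directly from the Python driver, where the operators are exposed as r.and_ and r.or_ to avoid the keyword clash. A minimal sketch (assumes a local server; False/None are the Python spellings of the test's false/nil):

    from rethinkdb import RethinkDB

    r = RethinkDB()
    conn = r.connect("localhost", 28015)

    print(r.and_().run(conn))                 # True  -- identity of `and`
    print(r.or_().run(conn))                  # False -- identity of `or`
    print(r.expr(False).or_(None).run(conn))  # None  -- last operand wins when all are falsey
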
@ -1,14 +0,0 @@
desc: Test that multi indexes ignore values that cannot be indexed, still indexing the remaining values
table_variable_name: tbl
tests:
# This is testing a property of the server, so a single language (in this case JS)
# is enough.
- js: tbl.indexCreate("multi_idx", function(x) { return [x("a"), x("b")] }, {multi:true})
ot: {created: 1}
- js: tbl.indexWait("multi_idx")

- js: tbl.insert([{a:"a", b:null}, {a:"a", b:r.point(0,0)}])("inserted")
ot: 2

- js: tbl.getAll("a", {index:"multi_idx"}).count()
ot: 2

@ -1,10 +0,0 @@
desc: 4431 -- detect `use_outdated` optarg
tests:
- cd: r.table('test')
runopts:
use_outdated: true
ot: err('ReqlQueryLogicError', 'The `use_outdated` optarg is no longer supported. Use the `read_mode` optarg instead.')

- py: r.table('test', use_outdated=True)
cd: r.table('test', {use_outdated:true})
ot: err('ReqlQueryLogicError', 'The `use_outdated` optarg is no longer supported. Use the `read_mode` optarg instead.')

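The replacement optarg named in the error message is read_mode, which accepts 'single', 'majority' or 'outdated'. A Python sketch (illustrative; assumes a table called test exists in the default database):

    from rethinkdb import RethinkDB
    from rethinkdb.errors import ReqlQueryLogicError

    r = RethinkDB()
    conn = r.connect("localhost", 28015)

    try:
        r.table("test", use_outdated=True).run(conn)   # rejected with the message above
    except ReqlQueryLogicError as err:
        print(err)

    cursor = r.table("test", read_mode="outdated").run(conn)  # the supported replacement
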
@ -1,24 +0,0 @@
desc: 4462 -- Coroutine stacks should not overflow during datum serialization
table_variable_name: tbl
tests:
# Create a deeply nested array
- js: tbl.insert({id:1,arr:[]})('inserted')
ot: 1
- js: r.range(1000).forEach(function(i) { return tbl.get(1).update(function(x) { return {arr:[x('arr')]} }, {durability:'soft'})})('replaced')
ot: 1000
# We don't care about the actual contents here. We just want to make sure the server
# doesn't crash when sending the document over.
- js: tbl.get(1)
ot: partial({})
- js: tbl.get(1).delete()('deleted')
ot: 1

# A similar test with a nested object
- js: tbl.insert({id:1,obj:{}})('inserted')
ot: 1
- js: r.range(1000).forEach(function(i) { return tbl.get(1).update(function(x) { return {obj:{a:x('obj')}} }, {durability:'soft'})})('replaced')
ot: 1000
- js: tbl.get(1)
ot: partial({})
- js: tbl.get(1).delete()('deleted')
ot: 1

@ -1,8 +0,0 @@
desc: 4465 (Delete tables from table_config)
table_variable_name: blah
tests:
- py: r.db("rethinkdb").table("db_config").delete()
ot: {"deleted":1,"errors":0,"inserted":0,"replaced":0,"skipped":0,"unchanged":0}

- py: r.db("rethinkdb").table("table_status")
ot: []

@ -1,5 +0,0 @@
desc: index_wait should throw on missing indexes.
table_variable_name: tbl
tests:
- cd: tbl.index_wait("missing")
ot: err_regex('ReqlOpFailedError', 'Index `missing` was not found on table `[a-zA-Z0-9_]+.[a-zA-Z0-9_]+`[.]', [0])

@ -1,16 +0,0 @@
desc: Sanity Check Fails, with r.table() expression inside a map (#453)
table_variable_name: tbl
tests:

- cd: tbl.insert([{'a':1},{'a':2}])
ot: partial({'inserted':2})

- js: tbl.map(function(x) { return tbl; })
py: tbl.map(lambda x: tbl)
rb: tbl.map{ |x| tbl }
ot: err("ReqlQueryLogicError", 'Expected type DATUM but found TABLE:', [0])

- js: tbl.map(function(x) { return tbl.coerceTo('array'); }).count()
py: tbl.map(lambda x: tbl.coerce_to('array')).count()
rb: tbl.map{ |x| tbl.coerce_to('array') }.count
ot: 2