Merge branch 'nest-query-mlv2' into offset-part-2
camsaul committed May 3, 2024
2 parents 6b35ac9 + 3515977 commit bfdd7d9
Showing 64 changed files with 1,147 additions and 680 deletions.
@@ -11,14 +11,15 @@ import {
   getNotebookStep,
   openNotebook,
   openOrdersTable,
+  openPeopleTable,
   popover,
   resetSnowplow,
   restore,
   visitQuestion,
   visualize,
 } from "e2e/support/helpers";
 
-const { ORDERS, ORDERS_ID } = SAMPLE_DATABASE;
+const { ORDERS, ORDERS_ID, PEOPLE } = SAMPLE_DATABASE;
 
 const DATE_CASES = [
   {
@@ -53,6 +54,37 @@ const DATE_CASES = [
   },
 ];
 
+const EMAIL_CASES = [
+  {
+    option: "Domain",
+    value: "yahoo",
+    example: "example, online",
+  },
+  {
+    option: "Host",
+    value: "yahoo.com",
+    example: "example.com, online.com",
+  },
+];
+
+const URL_CASES = [
+  {
+    option: "Domain",
+    value: "yahoo",
+    example: "example, online",
+  },
+  {
+    option: "Subdomain",
+    value: "",
+    example: "www, maps",
+  },
+  {
+    option: "Host",
+    value: "yahoo.com",
+    example: "example.com, online.com",
+  },
+];
+
 const DATE_QUESTION = {
   query: {
     "source-table": ORDERS_ID,
@@ -86,6 +118,7 @@ describeWithSnowplow("extract action", () => {
           option,
           value,
           example,
+          extraction: "Extract day, month…",
         });
       });
     });
@@ -97,6 +130,7 @@ describeWithSnowplow("extract action", () => {
      extractColumnAndCheck({
        column: "Created At",
        option: "Year",
+       extraction: "Extract day, month…",
      });
      const columnIndex = 7;
      checkColumnIndex({
@@ -114,6 +148,7 @@ describeWithSnowplow("extract action", () => {
      extractColumnAndCheck({
        column: "Created At",
        option: "Year",
+       extraction: "Extract day, month…",
      });
      const columnIndex = 7;
      checkColumnIndex({
@@ -168,6 +203,7 @@ describeWithSnowplow("extract action", () => {
      extractColumnAndCheck({
        column: "Created At",
        option: "Year",
+       extraction: "Extract day, month…",
      });
      const columnIndex = 1;
      checkColumnIndex({
@@ -187,6 +223,7 @@ describeWithSnowplow("extract action", () => {
        column: "Created At: Month",
        option: "Month of year",
        value: "Apr",
+       extraction: "Extract day, month…",
      });
    });
 
@@ -196,6 +233,7 @@ describeWithSnowplow("extract action", () => {
        column: "Min of Created At: Default",
        option: "Year",
        value: "2,022",
+       extraction: "Extract day, month…",
      });
    });
 
@@ -205,11 +243,13 @@ describeWithSnowplow("extract action", () => {
        column: "Created At",
        option: "Hour of day",
        newColumn: "Hour of day",
+       extraction: "Extract day, month…",
      });
      extractColumnAndCheck({
        column: "Created At",
        option: "Hour of day",
        newColumn: "Hour of day_2",
+       extraction: "Extract day, month…",
      });
    });
 
@@ -219,6 +259,7 @@ describeWithSnowplow("extract action", () => {
        column: "Created At",
        option: "Year",
        value: "2,025",
+       extraction: "Extract day, month…",
      });
      openNotebook();
      getNotebookStep("expression").findByText("Year").click();
@@ -237,6 +278,53 @@ describeWithSnowplow("extract action", () => {
        column: "Created At",
        option: "Tag der Woche",
        value: "Dienstag",
+       extraction: "Extract day, month…",
      });
    });
  });
+
+  describe("email columns", () => {
+    beforeEach(() => {
+      restore();
+      cy.signInAsAdmin();
+    });
+
+    EMAIL_CASES.forEach(({ option, value, example }) => {
+      it(option, () => {
+        openPeopleTable({ limit: 1 });
+        extractColumnAndCheck({
+          column: "Email",
+          option,
+          value,
+          example,
+          extraction: "Extract domain, host…",
+        });
+      });
+    });
+  });
+
+  describe("url columns", () => {
+    beforeEach(function () {
+      restore();
+      cy.signInAsAdmin();
+
+      // Make the Email column a URL column for these tests, to avoid having to create a new model
+      cy.request("PUT", `/api/field/${PEOPLE.EMAIL}`, {
+        semantic_type: "type/URL",
+      });
+    });
+
+    URL_CASES.forEach(({ option, value, example }) => {
+      it(option, () => {
+        openPeopleTable({ limit: 1 });
+
+        extractColumnAndCheck({
+          column: "Email",
+          option,
+          value,
+          example,
+          extraction: "Extract domain, subdomain…",
+        });
+      });
+    });
+  });
@@ -246,13 +334,14 @@ function extractColumnAndCheck({
   column,
   option,
   newColumn = option,
+  extraction,
   value,
   example,
 }) {
   const requestAlias = _.uniqueId("dataset");
   cy.intercept("POST", "/api/dataset").as(requestAlias);
   cy.findByRole("columnheader", { name: column }).click();
-  popover().findByText("Extract day, month…").click();
+  popover().findByText(extraction).click();
   cy.wait(1);
 
   if (example) {
@@ -19,9 +19,9 @@ export function getExample(info: Lib.ColumnExtractionInfo) {
     case "year":
       return "2023, 2024";
     case "domain":
-      return "example.com, online.com";
-    case "host":
       return "example, online";
+    case "host":
+      return "example.com, online.com";
     case "subdomain":
       return "www, maps";
   }
@@ -83,9 +83,9 @@ export function getExample(info: Lib.ColumnExtractionInfo) {
     case "year":
       return "2023, 2024";
     case "domain":
-      return "example.com, online.com";
-    case "host":
       return "example, online";
+    case "host":
+      return "example.com, online.com";
     case "subdomain":
       return "www, maps";
   }
@@ -604,6 +604,7 @@ HistogramTicks45Degrees.args = {
   dashcardSettings: {},
   renderingContext,
 };
+
 export const HistogramTicks90Degrees = Template.bind({});
 HistogramTicks90Degrees.args = {
   rawSeries: data.histogramTicks90Degrees as any,
@@ -618,6 +619,13 @@ LineUnpinFromZero.args = {
   renderingContext,
 };
 
+export const LineReplaceMissingValuesZero = Template.bind({});
+LineReplaceMissingValuesZero.args = {
+  rawSeries: data.lineReplaceMissingValuesZero as any,
+  dashcardSettings: {},
+  renderingContext,
+};
+
 export const Default = Template.bind({});
 Default.args = {
   rawSeries: data.messedUpAxis as any,
@@ -51,6 +51,7 @@ import lineLinearXScaleUnsorted from "./line-linear-x-scale-unsorted.json";
 import lineLinearXScale from "./line-linear-x-scale.json";
 import lineLogYScaleNegative from "./line-log-y-scale-negative.json";
 import lineLogYScale from "./line-log-y-scale.json";
+import lineReplaceMissingValuesZero from "./line-replace-missing-values-zero.json";
 import lineShowDotsAuto from "./line-show-dots-auto.json";
 import lineShowDotsOff from "./line-show-dots-off.json";
 import lineShowDotsOn from "./line-show-dots-on.json";
@@ -170,4 +171,5 @@ export const data = {
   histogramTicks45Degrees,
   histogramTicks90Degrees,
   lineUnpinFromZero,
+  lineReplaceMissingValuesZero,
 };

0 comments on commit bfdd7d9
