that big rewrite :)
guesant committed Dec 11, 2021
1 parent 9327293 commit bd35ffa
Showing 247 changed files with 13,311 additions and 0 deletions.
4 changes: 4 additions & 0 deletions .eslintignore
@@ -0,0 +1,4 @@
.parcel-cache
.pnpm-store
pnpm-lock.yaml
dist
32 changes: 32 additions & 0 deletions .eslintrc.js
@@ -0,0 +1,32 @@
module.exports = {
env: {
browser: true,
es2021: true
},
settings: {
react: {
version: "detect"
}
},
extends: [
"plugin:react/recommended",
"plugin:react/jsx-runtime",
"standard",
"prettier"
],
parser: "@typescript-eslint/parser",
parserOptions: {
ecmaFeatures: {
jsx: true
},
ecmaVersion: 12,
sourceType: "module"
},
plugins: ["react", "@typescript-eslint"],
rules: {
quotes: ["error", "double"],
"no-void": ["off"],
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error"]
}
}
4 changes: 4 additions & 0 deletions .husky/pre-commit
@@ -0,0 +1,4 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

node_modules/.bin/lint-staged
4 changes: 4 additions & 0 deletions .prettierignore
@@ -0,0 +1,4 @@
.parcel-cache
.pnpm-store
pnpm-lock.yaml
dist
4 changes: 4 additions & 0 deletions .prettierrc.js
@@ -0,0 +1,4 @@
module.exports = {
...require("prettier-config-standard"),
singleQuote: false
}
21 changes: 21 additions & 0 deletions README.md
@@ -15,6 +15,27 @@ git clone -b dev https://github.com/guesant/ava-pro.git
cd ava-pro
```

#### Development with docker-compose (recommended)

We recommend using docker-compose to develop the extension (~~due to security reasons lol~~ [[1]](https://thehackernews.com/2021/10/popular-npm-package-hijacked-to-publish.html)).

```sh
docker-compose up dev # -> packages/webextension/dist/dev
docker-compose up build # -> packages/webextension/dist/prod
```

#### Development with your system's Node.js

```sh
npm i -g pnpm
pnpm install
```

```sh
pnpm run dev # -> packages/webextension/dist/dev
pnpm run build # -> packages/webextension/dist/prod
```

## License

[![GNU GPLv3 Image](https://www.gnu.org/graphics/gplv3-127x51.png)](http://www.gnu.org/licenses/gpl-3.0.en.html)
19 changes: 19 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,19 @@
services:
dev:
image: cl00e9ment/node.js-builder
restart: always
container_name: ava-pro-dev
command: [sh, -c, 'su node -c "pnpm install && pnpm run dev"']
volumes:
- .:/code
working_dir: /code
ports:
- 1234:1234

build:
image: cl00e9ment/node.js-builder
container_name: ava-pro-build
command: [sh, -c, 'su node -c "pnpm install && pnpm run build"']
volumes:
- .:/code
working_dir: /code
31 changes: 31 additions & 0 deletions package.json
@@ -0,0 +1,31 @@
{
"name": "root",
"version": "0.0.2",
"private": true,
"scripts": {
"format": "prettier --write .",
"lint": "eslint --fix --ext js,ts,tsx,jsx packages",
"dev": "pnpm run --filter @ava-pro/webextension start",
"build": "pnpm run clear:cache; pnpm run --filter @ava-pro/webextension build",
"clear:cache": "find . -type d -name .parcel-cache -exec rm -rf {} \\+",
"prepare": "husky install"
},
"lint-staged": {
"**/*": "prettier --write --ignore-unknown"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^5.4.0",
"@typescript-eslint/parser": "^5.4.0",
"eslint": "^7.12.1",
"eslint-config-prettier": "^8.3.0",
"eslint-config-standard": "^16.0.3",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-react": "^7.27.1",
"husky": "^7.0.4",
"lint-staged": "^12.1.2",
"prettier": "^2.4.1",
"prettier-config-standard": "^4.0.0"
}
}
@@ -0,0 +1,18 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { ajaxService } from "../../../../ajaxService"
import { IMessageAreaConversations } from "../../../../Typings/IMessageAreaConversations"

type IConversationsOptions = {
userid: number
} & Record<string, any>

export const Conversations =
(crawlerFetch: ICrawlerFetch) =>
({ ...options }: IConversationsOptions) =>
ajaxService(crawlerFetch)<IMessageAreaConversations>(
"core_message_data_for_messagearea_conversations",
{
limitfrom: 0,
...options
}
)
@@ -0,0 +1,15 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { ajaxService } from "../../../../ajaxService"
import { IMessageArea } from "../../../../Typings/IMessageArea"

type IMessagesOptions = {
otheruserid: number
currentuserid: number
} & Record<string, any>

export const Messages =
(crawlerFetch: ICrawlerFetch) => (options: IMessagesOptions) =>
ajaxService(crawlerFetch)<IMessageArea>(
"core_message_data_for_messagearea_messages",
{ newest: true, ...options }
)
@@ -0,0 +1,15 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { ajaxService } from "../../../../ajaxService"
import { IMessageAreaSearchContacts } from "../../../../Typings/IMessageAreaSearchContacts"

type ISearchOptions = {
userid: number
search: string
} & Record<string, any>

export const Search =
(crawlerFetch: ICrawlerFetch) => (options: ISearchOptions) =>
ajaxService(crawlerFetch)<IMessageAreaSearchContacts>(
"core_message_data_for_messagearea_search_users",
{ ...options }
)
@@ -0,0 +1,15 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { ajaxService } from "../../../ajaxService"
import { IMessageArea } from "../../../Typings/IMessageArea"

type IMarkAllMessagesAsReadOptions = {
useridto: number
useridfrom: number
} & Record<string, any>

export const MarkAllMessagesAsRead =
(crawlerFetch: ICrawlerFetch) => (options: IMarkAllMessagesAsReadOptions) =>
ajaxService(crawlerFetch)<IMessageArea>(
"core_message_mark_all_messages_as_read",
{ ...options }
)
@@ -0,0 +1,19 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { ajaxService } from "../../../ajaxService"

type ISendInstantMessagesOptions = {
text: string
touserid: number
} & Record<string, any>

export const SendInstantMessages =
(crawlerFetch: ICrawlerFetch) =>
({ text, ...options }: ISendInstantMessagesOptions) =>
ajaxService(crawlerFetch)<any>("core_message_send_instant_messages", {
messages: [
{
text: text.trim(),
...options
}
]
})
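
All of these message services share the same curried shape: apply an `ICrawlerFetch` first, then pass the Moodle AJAX options. A minimal usage sketch, assuming hypothetical relative import paths and a `crawlerFetch` backed by the browser's `fetch` (the resolved value depends on `ajaxService`, which is not part of this diff):

```ts
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
// Hypothetical paths; the diff does not show where these modules live relative to the caller.
import { Conversations } from "./Conversations"
import { SendInstantMessages } from "./SendInstantMessages"

// Assumption: ICrawlerFetch accepts { url, options? } and resolves to a fetch Response,
// matching how ExtractTokens and CheckLogin use it elsewhere in this commit.
const crawlerFetch: ICrawlerFetch = ({ url, options }) => fetch(url, options)

const example = async (userid: number, touserid: number) => {
  // List the current user's conversations (limitfrom defaults to 0).
  const conversations = await Conversations(crawlerFetch)({ userid })

  // Send a single instant message; the text is trimmed before being wrapped
  // into the `messages` array expected by the Moodle web service.
  await SendInstantMessages(crawlerFetch)({ text: "  hello  ", touserid })

  return conversations
}
```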
10 changes: 10 additions & 0 deletions packages/crawlers/lib/Scrappers/CheckLogin/CheckLogin.ts
@@ -0,0 +1,10 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { routes } from "../../routes"
import { getLoginState } from "../getLoginState"

export const CheckLogin = (crawlerFetch: ICrawlerFetch) => async () => {
const page = await crawlerFetch({ url: routes.login() }).then((res) =>
res.text()
)
return getLoginState(page)
}
@@ -0,0 +1,6 @@
export const getMoodleInstanceHome = (document: Document) => {
const homeAnchor = document.querySelector<HTMLAnchorElement>(
"#nav-drawer a[data-key='home'], .homelink a"
)
return homeAnchor?.href
}
@@ -0,0 +1,4 @@
export const getMoodleInstanceName = (document: Document) => {
const siteName = document.querySelector<HTMLSpanElement>(".site-name")
return siteName?.textContent ?? document.title ?? document.location.href
}
@@ -0,0 +1,8 @@
export const isMoodleInstance = (document: Document) => {
const metaKeywords = document.querySelector('meta[name="keywords"]')

const keywords =
metaKeywords?.attributes.getNamedItem("content")?.value.split(",") || []

return keywords.includes("moodle")
}
28 changes: 28 additions & 0 deletions packages/crawlers/lib/Scrappers/ExtractTokens/ExtractTokens.ts
@@ -0,0 +1,28 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { routes } from "../../routes"
import { extractSessKey } from "./extractSessKey"
import { extractUserId } from "./extractUserId"

export const ExtractTokens = (crawlerFetch: ICrawlerFetch) => async () => {
const data = await crawlerFetch({
url: routes.my(),
options: { method: "get" }
}).then((res) => res.text())

const sessKey = extractSessKey(data)
const userId = extractUserId(data)

return { sessKey, userId }
}

export const ExtractTokensSessKey =
(crawlerFetch: ICrawlerFetch) => async () => {
const { sessKey } = await ExtractTokens(crawlerFetch)()
return sessKey
}

export const ExtractTokensUserId =
(crawlerFetch: ICrawlerFetch) => async () => {
const { userId } = await ExtractTokens(crawlerFetch)()
return userId
}
@@ -0,0 +1,4 @@
export const extractSessKey = (pageContent: string): string | null => {
const [, sesskey = null] = pageContent.match(/"sesskey":"([^"]+)"/) || []
return sesskey
}
@@ -0,0 +1,4 @@
export const extractUserId = (pageContent: string): number | null => {
const [, userid = null] = pageContent.match(/data-userid="([\d]+)"/) || []
return userid ? +userid : null
}
15 changes: 15 additions & 0 deletions packages/crawlers/lib/Scrappers/ListCourses/ListCourses.ts
@@ -0,0 +1,15 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { routes } from "../../routes"
import { extractPageCourses } from "./extractPageCourses"

export const ListCourses = (crawlerFetch: ICrawlerFetch) => async () => {
const response = await crawlerFetch({ url: routes.myTabOverviewCourses() })

if (response.redirected) {
throw new Error()
}

const pageContent = await response.text()

return extractPageCourses(pageContent)
}
18 changes: 18 additions & 0 deletions packages/crawlers/lib/Scrappers/ListCourses/extractCourses.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import { strcmp } from "@ava-pro/shared/lib/Utils/strcmp"
import { selectAll } from "css-select"
import { Document, Element } from "domhandler"
import { parseDocument } from "htmlparser2"
import { extractedCourseFromNode } from "./extractedCourseFromNode"

export const extractCourses = (data: string, baseSelector: string = "") => {
const dom = parseDocument(data, {})

const links = selectAll<Document, Element>(
`${baseSelector} #pc-for-in-progress .course-info-container a`.trim(),
dom
)

return links
.map(extractedCourseFromNode)
.sort((a, b) => strcmp(a.name, b.name))
}
15 changes: 15 additions & 0 deletions packages/crawlers/lib/Scrappers/ListCourses/extractPageCourses.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import { IPageExtractedCourses } from "@ava-pro/shared/lib/Interfaces/IPageExtractedCourses"
import { extractCourses } from "./extractCourses"

enum SelectorsMyOverviewCourses {
IN_PROGRESS = "#myoverview_courses_view_in_progress",
FUTURE = "#myoverview_courses_view_future",
PAST = "#myoverview_courses_view_past"
}

export const extractPageCourses = (data: string): IPageExtractedCourses =>
({
PAST: extractCourses(data, SelectorsMyOverviewCourses.PAST),
IN_PROGRESS: extractCourses(data, SelectorsMyOverviewCourses.IN_PROGRESS),
FUTURE: extractCourses(data, SelectorsMyOverviewCourses.FUTURE)
} as any)
@@ -0,0 +1,14 @@
import { IExtractedCourseSlim } from "@ava-pro/shared/lib/Interfaces/IExtractedCourseSlim"
import { Element } from "domhandler"
import { getAttributeValue, innerText } from "domutils"
import normalizeUrl from "normalize-url"
import { getCourseIdFromURL } from "../getCourseIdFromURL"

export const extractedCourseFromNode = (
node: Element
): IExtractedCourseSlim => {
const name = innerText(node).trim()
const url = normalizeUrl(getAttributeValue(node, "href") ?? "#")
const courseId = getCourseIdFromURL(url)!
return { name, url, courseId }
}
27 changes: 27 additions & 0 deletions packages/crawlers/lib/Scrappers/Login/Login.ts
@@ -0,0 +1,27 @@
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
import { routes } from "../../routes"
import { getLoginState } from "../getLoginState"
import { Logout } from "../Logout/Logout"

export const Login =
(crawlerFetch: ICrawlerFetch) =>
async (username: string, password: string, skipLogout: boolean = false) => {
if (!skipLogout) {
await Logout(crawlerFetch)()
}

const body = new FormData()

body.set("username", username)
body.set("password", password)

const response = await crawlerFetch({
url: routes.login(),
options: {
body,
method: "post"
}
}).then((res) => res.text())

return getLoginState(response)
}
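
Putting the scrapers together, a login-then-scrape flow might look like the sketch below. The import paths are assumptions, and the shape of the value returned by `getLoginState` is not shown in this diff:

```ts
import { ICrawlerFetch } from "@ava-pro/shared/lib/Interfaces/ICrawlerFetch"
// Hypothetical paths relative to packages/crawlers/lib/Scrappers.
import { Login } from "./Login/Login"
import { ListCourses } from "./ListCourses/ListCourses"

const crawlerFetch: ICrawlerFetch = ({ url, options }) => fetch(url, options)

// Log in (Logout runs first unless skipLogout is passed), then scrape the
// "in progress" / "future" / "past" course lists from the overview page.
const loginAndListCourses = async (username: string, password: string) => {
  await Login(crawlerFetch)(username, password)
  // ListCourses throws when the request is redirected, i.e. the session is not authenticated.
  return ListCourses(crawlerFetch)()
}
```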