75 Commits

Author SHA1 Message Date
2b16a38a66 fix: improve performance 2024-05-13 15:06:23 -05:00
e87d81cc1d fix: generalize to MonadAff 2024-05-13 11:52:09 -05:00
b4b6dfdebd chore: prepare v2.0.0 2024-05-10 18:41:10 -05:00
d8b0039678 feat: rework API, use node-stream-pipes 2024-05-10 18:40:47 -05:00
d355d3b91c chore: prepare v1.2.19 2024-05-09 11:02:22 -05:00
eea77a3000 fix: maybe a race condition 2024-05-09 11:02:14 -05:00
3d8d722871 chore: prepare v1.2.18 2024-05-07 08:35:57 -05:00
aa7fb66b74 fix: prematurely ending 2024-05-07 08:35:48 -05:00
092276ed4c chore: prepare v1.2.17 2024-05-05 14:19:43 -05:00
bee63db2e7 fix: race condition or something 2024-05-05 14:19:34 -05:00
d6c6130279 chore: prepare v1.2.16 2024-05-03 15:51:53 -05:00
bbb78e2d95 fix: timeslice shenanigans 2024-05-03 15:51:45 -05:00
bf7404fe59 chore: prepare v1.2.15 2024-05-03 15:40:21 -05:00
6d41852361 fix: effect cb 2024-05-03 15:40:15 -05:00
35bd75fadf chore: prepare v1.2.14 2024-05-03 14:34:23 -05:00
c94adacc1a fix: timeslice shenanigans 2024-05-03 14:34:08 -05:00
a94781834d chore: prepare v1.2.13 2024-05-03 14:28:26 -05:00
499eb82986 fix: ?? 2024-05-03 14:28:23 -05:00
8e842feeef chore: prepare v1.2.12 2024-05-03 14:23:52 -05:00
52dd297c9c fix: what 2024-05-03 14:22:07 -05:00
39276c2546 chore: prepare v1.2.11 2024-05-03 14:05:54 -05:00
8ba33e88cf fix: maybe this has already been emitted 2024-05-03 14:05:51 -05:00
6b28c7fdf7 chore: prepare v1.2.10 2024-05-03 14:00:39 -05:00
093fff058d fix: add columns event 2024-05-03 14:00:35 -05:00
c8d83e8cf3 chore: prepare v1.2.9 2024-05-03 13:55:26 -05:00
559967d7a7 fix: scheduler nonsense 2024-05-03 13:55:16 -05:00
ebf048b431 chore: prepare v1.2.8 2024-05-03 13:45:33 -05:00
8201ede7c4 fix: reading the columns emits data 2024-05-03 13:45:27 -05:00
3a5553fd29 chore: prepare v1.2.7 2024-05-03 13:40:14 -05:00
42e779a2a7 fix: rework to just use streaming mode 2024-05-03 13:40:06 -05:00
2be8960836 chore: prepare v1.2.6 2024-05-03 13:06:39 -05:00
4c72c8f3b7 fix: bugs 2024-05-03 13:06:36 -05:00
a3be110749 chore: prepare v1.2.5 2024-05-03 13:00:45 -05:00
874503a300 fix: bug 2024-05-03 12:58:52 -05:00
05b61d84f0 chore: prepare v1.2.4 2024-05-03 12:47:28 -05:00
d6638ead1d fix: maybe this is faster? 2024-05-03 12:47:12 -05:00
f3d9ea8c11 chore: prepare v1.2.3 2024-05-03 10:58:52 -05:00
07c86f096f fix: timeslice shenanigans 2024-05-03 10:58:47 -05:00
a3625ab1b7 chore: prepare v1.2.2 2024-05-03 10:54:17 -05:00
87f42c3bfe fix: more efficiently / less blockingly read? 2024-05-03 10:54:12 -05:00
5f8a82c8d8 fix: replace unneeded parTraverse with traverse 2024-05-03 10:46:48 -05:00
cda17f8d3c chore: prepare v1.2.1 2024-05-03 10:44:42 -05:00
616ceabd9f fix: do not busy loop 2024-05-03 10:44:37 -05:00
eab713cd4e chore: prepare v1.2.0 2024-05-03 10:42:51 -05:00
e21260cd2c feat: Parser.foreach should concurrently process records as they are read
User-provided callback no longer blocks subsequent reads
2024-05-03 10:41:33 -05:00
02090c3129 chore: prepare v1.1.14 2024-05-02 13:17:17 -05:00
bb25b8f562 fix: streaming mode bug 2024-05-02 13:17:14 -05:00
e3c89adaed chore: prepare v1.1.13 2024-05-02 13:07:40 -05:00
854ceacba3 fix: race condition? 2024-05-02 13:07:33 -05:00
a29203ce14 chore: prepare v1.1.12 2024-05-02 13:06:14 -05:00
a7b46d632a fix: race condition? 2024-05-02 13:06:08 -05:00
ee7619b93d chore: prepare v1.1.11 2024-05-02 13:02:54 -05:00
3adbc63df1 fix: race condition? 2024-05-02 13:02:51 -05:00
4f0ddbf75c chore: prepare v1.1.10 2024-05-02 12:56:24 -05:00
1ee358a55b fix: canceler in foreach 2024-05-02 12:56:14 -05:00
30f127788b chore: prepare v1.1.9 2024-05-02 12:00:04 -05:00
1eb6f2242f fix: generalize parser/stringifier to MonadAff 2024-05-02 11:59:50 -05:00
03cc9eba28 chore: prepare v1.1.8 2024-05-01 16:45:30 -05:00
488ea405ff fix: parse numbers properly 2024-05-01 16:45:20 -05:00
bb2274bf19 chore: prepare v1.1.7 2024-05-01 11:05:46 -05:00
8eaad8a39c fix: more bugs 2024-05-01 10:35:29 -05:00
c80bcaa4d6 chore: prepare v1.1.6 2024-05-01 10:12:54 -05:00
cae11ace61 fix: more bugs 2024-05-01 10:12:51 -05:00
70d6ed44f6 chore: prepare v1.1.5 2024-05-01 10:02:50 -05:00
a17f0774f6 fix: more bugs 2024-05-01 10:02:45 -05:00
f2e2b4b9c8 chore: prepare v1.1.4 2024-05-01 10:01:25 -05:00
0a9beb46ea fix: more bugs 2024-05-01 10:01:22 -05:00
60346c9c10 chore: prepare v1.1.3 2024-05-01 09:55:28 -05:00
10fe682cc9 fix: use column order from first row 2024-05-01 09:55:21 -05:00
db425ea4d0 chore: prepare v1.1.2 2024-05-01 09:11:48 -05:00
7c64e66119 fix: readCSVRecord bug 2024-05-01 09:11:34 -05:00
8d2705c53e chore: prepare v1.1.1 2024-05-01 08:54:07 -05:00
3bc01c5afa fix: stringify options 2024-05-01 08:52:13 -05:00
67353391b2 chore: prepare v1.1.0 2024-04-30 19:10:11 -05:00
ad46b8daa7 feat: foreach 2024-04-30 19:09:52 -05:00
17 changed files with 634 additions and 183 deletions

2
.gitignore vendored
View File

@@ -1,4 +1,3 @@
bower_components/
node_modules/
.pulp-cache/
@@ -10,3 +9,4 @@ generated-docs/
.purs*
.psa*
.spago
.tmp/

BIN
bun.lockb

Binary file not shown.

View File

@@ -1,27 +1,27 @@
/** @type {(parser: string, ps: string[]) => import("bun").Subprocess} */
const prettier = (parser, ps) =>
Bun.spawn(['bun', 'x', 'prettier', '--write', '--parser', parser, ...ps], {
stdout: 'inherit',
stderr: 'inherit',
})
Bun.spawn(["bun", "x", "prettier", "--write", "--parser", parser, ...ps], {
stdout: "inherit",
stderr: "inherit",
});
const procs = [
prettier('babel', ['./src/**/*.js', './bun/**/*.js', './.prettierrc.cjs']),
prettier('json', ['./package.json', './jsconfig.json']),
prettier("babel", ["./src/**/*.js", "./bun/**/*.js", "./.prettierrc.cjs"]),
prettier("json", ["./package.json", "./jsconfig.json"]),
Bun.spawn(
[
'bun',
'x',
'purs-tidy',
'format-in-place',
'src/**/*.purs',
'test/**/*.purs',
"bun",
"x",
"purs-tidy",
"format-in-place",
"src/**/*.purs",
"test/**/*.purs",
],
{
stdout: 'inherit',
stderr: 'inherit',
stdout: "inherit",
stderr: "inherit",
},
),
]
];
await Promise.all(procs.map(p => p.exited))
await Promise.all(procs.map((p) => p.exited));

View File

@@ -1,34 +1,34 @@
import { readFile, writeFile } from 'fs/promises'
import { execSync } from 'child_process'
import { readFile, writeFile } from "fs/promises";
import { execSync } from "child_process";
let ver = process.argv[2]
let ver = process.argv[2];
if (!ver) {
console.error(`tag required: bun bun/prepare.js v1.0.0`)
process.exit(1)
console.error(`tag required: bun bun/prepare.js v1.0.0`);
process.exit(1);
} else if (!/v\d+\.\d+\.\d+/.test(ver)) {
console.error(`invalid tag: ${ver}`)
process.exit(1)
console.error(`invalid tag: ${ver}`);
process.exit(1);
}
ver = (/\d+\.\d+\.\d+/.exec(ver) || [])[0] || ''
ver = (/\d+\.\d+\.\d+/.exec(ver) || [])[0] || "";
const pkg = await readFile('./package.json', 'utf8')
const pkgnew = pkg.replace(/"version": ".+"/, `"version": "v${ver}"`)
await writeFile('./package.json', pkgnew)
const pkg = await readFile("./package.json", "utf8");
const pkgnew = pkg.replace(/"version": ".+"/, `"version": "v${ver}"`);
await writeFile("./package.json", pkgnew);
const spago = await readFile('./spago.yaml', 'utf8')
const spagonew = spago.replace(/version: .+/, `version: '${ver}'`)
await writeFile('./spago.yaml', spagonew)
const spago = await readFile("./spago.yaml", "utf8");
const spagonew = spago.replace(/version: .+/, `version: '${ver}'`);
await writeFile("./spago.yaml", spagonew);
const readme = await readFile('./README.md', 'utf8')
const readme = await readFile("./README.md", "utf8");
const readmenew = readme.replace(
/packages\/purescript-csv-stream\/.+?\//g,
`/packages/purescript-csv-stream/${ver}/`,
)
await writeFile('./README.md', readmenew)
);
await writeFile("./README.md", readmenew);
execSync(`git add spago.yaml package.json README.md`)
execSync(`git commit -m 'chore: prepare v${ver}'`)
execSync(`git tag v${ver}`)
execSync(`git push --tags`)
execSync(`git push --mirror github-mirror`)
execSync(`git add spago.yaml package.json README.md`);
execSync(`git commit -m 'chore: prepare v${ver}'`);
execSync(`git tag v${ver}`);
execSync(`git push --tags`);
execSync(`git push --mirror github-mirror`);

View File

@@ -1,9 +1,11 @@
{
"name": "purescript-csv-stream",
"version": "v1.0.2",
"version": "v2.0.0",
"type": "module",
"dependencies": {
"csv-parse": "^5.5.5",
"csv-stringify": "^6.4.6"
"csv-stringify": "^6.4.6",
"decimal.js": "^10.4.3"
},
"devDependencies": {
"typescript": "^5.4.5"

View File

@@ -8,7 +8,6 @@ workspace:
- bifunctors: ">=6.0.0 <7.0.0"
- datetime: ">=6.1.0 <7.0.0"
- effect: ">=4.0.0 <5.0.0"
- either: ">=6.1.0 <7.0.0"
- exceptions: ">=6.0.0 <7.0.0"
- foldable-traversable: ">=6.0.0 <7.0.0"
- foreign: ">=7.0.0 <8.0.0"
@@ -19,25 +18,39 @@ workspace:
- newtype: ">=5.0.0 <6.0.0"
- node-buffer: ">=9.0.0 <10.0.0"
- node-event-emitter: ">=3.0.0 <4.0.0"
- node-stream-pipes: ">=1.3.0 <2.0.0"
- node-streams: ">=9.0.0 <10.0.0"
- nullable: ">=6.0.0 <7.0.0"
- numbers: ">=9.0.1 <10.0.0"
- ordered-collections: ">=3.2.0 <4.0.0"
- pipes: ">=8.0.0 <9.0.0"
- precise-datetime: ">=7.0.0 <8.0.0"
- prelude: ">=6.0.1 <7.0.0"
- record: ">=4.0.0 <5.0.0"
- record-extra: ">=5.0.1 <6.0.0"
- st: ">=6.2.0 <7.0.0"
- strings: ">=6.0.1 <7.0.0"
- tailrec: ">=6.1.0 <7.0.0"
- transformers: ">=6.0.0 <7.0.0"
- tuples: ">=7.0.0 <8.0.0"
- typelevel-prelude: ">=7.0.0 <8.0.0"
- unsafe-coerce: ">=6.0.0 <7.0.0"
test_dependencies:
- console
- gen
- node-fs
- node-zlib
- quickcheck
- simple-json
- spec
build_plan:
- aff
- ansi
- arraybuffer-types
- arrays
- avar
- bifunctors
- catenable-lists
- console
- const
- contravariant
@@ -54,7 +67,9 @@ workspace:
- foldable-traversable
- foreign
- foreign-object
- fork
- formatters
- free
- functions
- functors
- gen
@@ -63,12 +78,18 @@ workspace:
- invariant
- js-date
- lazy
- lcg
- lists
- maybe
- mmorph
- newtype
- node-buffer
- node-event-emitter
- node-fs
- node-path
- node-stream-pipes
- node-streams
- node-zlib
- nonempty
- now
- nullable
@@ -78,12 +99,18 @@ workspace:
- parallel
- parsing
- partial
- pipes
- precise-datetime
- prelude
- profunctor
- quickcheck
- random
- record
- record-extra
- refs
- safe-coerce
- simple-json
- spec
- st
- strings
- tailrec
@@ -93,7 +120,9 @@ workspace:
- typelevel-prelude
- unfoldable
- unicode
- unordered-collections
- unsafe-coerce
- variant
extra_packages: {}
packages:
aff:
@@ -118,6 +147,14 @@ packages:
- tailrec
- transformers
- unsafe-coerce
ansi:
type: registry
version: 7.0.0
integrity: sha256-ZMB6HD+q9CXvn9fRCmJ8dvuDrOVHcjombL3oNOerVnE=
dependencies:
- foldable-traversable
- lists
- strings
arraybuffer-types:
type: registry
version: 3.0.2
@@ -142,6 +179,17 @@ packages:
- tuples
- unfoldable
- unsafe-coerce
avar:
type: registry
version: 5.0.0
integrity: sha256-e7hf0x4hEpcygXP0LtvfvAQ49Bbj2aWtZT3gqM///0A=
dependencies:
- aff
- effect
- either
- exceptions
- functions
- maybe
bifunctors:
type: registry
version: 6.0.0
@@ -152,6 +200,18 @@ packages:
- newtype
- prelude
- tuples
catenable-lists:
type: registry
version: 7.0.0
integrity: sha256-76vYENhwF4BWTBsjeLuErCH2jqVT4M3R1HX+4RwSftA=
dependencies:
- control
- foldable-traversable
- lists
- maybe
- prelude
- tuples
- unfoldable
console:
type: registry
version: 6.1.0
@@ -322,6 +382,12 @@ packages:
- tuples
- typelevel-prelude
- unfoldable
fork:
type: registry
version: 6.0.0
integrity: sha256-X7u0SuCvFbLbzuNEKLBNuWjmcroqMqit4xEzpQwAP7E=
dependencies:
- aff
formatters:
type: registry
version: 7.0.0
@@ -334,6 +400,25 @@ packages:
- parsing
- prelude
- transformers
free:
type: registry
version: 7.1.0
integrity: sha256-JAumgEsGSzJCNLD8AaFvuX7CpqS5yruCngi6yI7+V5k=
dependencies:
- catenable-lists
- control
- distributive
- either
- exists
- foldable-traversable
- invariant
- lazy
- maybe
- prelude
- tailrec
- transformers
- tuples
- unsafe-coerce
functions:
type: registry
version: 6.0.0
@@ -417,6 +502,17 @@ packages:
- foldable-traversable
- invariant
- prelude
lcg:
type: registry
version: 4.0.0
integrity: sha256-h7ME5cthLfbgJOJdsZcSfFpwXsx4rf8YmhebU+3iSYg=
dependencies:
- effect
- integers
- maybe
- partial
- prelude
- random
lists:
type: registry
version: 7.0.0
@@ -443,6 +539,14 @@ packages:
- invariant
- newtype
- prelude
mmorph:
type: registry
version: 7.0.0
integrity: sha256-urZlZNNqGeQFe5D/ClHlR8QgGBNHTMFPtJ5S5IpflTQ=
dependencies:
- free
- functors
- transformers
newtype:
type: registry
version: 5.0.0
@@ -473,6 +577,67 @@ packages:
- nullable
- prelude
- unsafe-coerce
node-fs:
type: registry
version: 9.1.0
integrity: sha256-TzhvGdrwcM0bazDvrWSqh+M/H8GKYf1Na6aGm2Qg4+c=
dependencies:
- datetime
- effect
- either
- enums
- exceptions
- functions
- integers
- js-date
- maybe
- node-buffer
- node-path
- node-streams
- nullable
- partial
- prelude
- strings
- unsafe-coerce
node-path:
type: registry
version: 5.0.0
integrity: sha256-pd82nQ+2l5UThzaxPdKttgDt7xlsgIDLpPG0yxDEdyE=
dependencies:
- effect
node-stream-pipes:
type: registry
version: 1.3.0
integrity: sha256-5Jpf0BLn0ExQWYxbTTewai4M8quEmEVHxihc9CM1Juo=
dependencies:
- aff
- arrays
- effect
- either
- exceptions
- foldable-traversable
- foreign-object
- lists
- maybe
- mmorph
- newtype
- node-buffer
- node-event-emitter
- node-fs
- node-path
- node-streams
- node-zlib
- ordered-collections
- parallel
- pipes
- prelude
- st
- strings
- tailrec
- transformers
- tuples
- unordered-collections
- unsafe-coerce
node-streams:
type: registry
version: 9.0.0
@@ -486,6 +651,19 @@ packages:
- node-event-emitter
- nullable
- prelude
node-zlib:
type: registry
version: 0.4.0
integrity: sha256-kYSajFQFzWVg71l5/y4w4kXdTr5EJoqyV3D2RqmAjQ4=
dependencies:
- aff
- effect
- either
- functions
- node-buffer
- node-streams
- prelude
- unsafe-coerce
nonempty:
type: registry
version: 7.0.0
@@ -593,6 +771,18 @@ packages:
version: 4.0.0
integrity: sha256-fwXerld6Xw1VkReh8yeQsdtLVrjfGiVuC5bA1Wyo/J4=
dependencies: []
pipes:
type: registry
version: 8.0.0
integrity: sha256-kvfqGM4cPA/wCcBHbp5psouFw5dZGvku2462x7ZBwSY=
dependencies:
- aff
- lists
- mmorph
- prelude
- tailrec
- transformers
- tuples
precise-datetime:
type: registry
version: 7.0.0
@@ -633,6 +823,45 @@ packages:
- newtype
- prelude
- tuples
quickcheck:
type: registry
version: 8.0.1
integrity: sha256-ZvpccKQCvgslTXZCNmpYW4bUsFzhZd/kQUr2WmxFTGY=
dependencies:
- arrays
- console
- control
- effect
- either
- enums
- exceptions
- foldable-traversable
- gen
- identity
- integers
- lazy
- lcg
- lists
- maybe
- newtype
- nonempty
- numbers
- partial
- prelude
- record
- st
- strings
- tailrec
- transformers
- tuples
- unfoldable
random:
type: registry
version: 6.0.0
integrity: sha256-CJ611a35MPCE7XQMp0rdC6MCn76znlhisiCRgboAG+Q=
dependencies:
- effect
- integers
record:
type: registry
version: 4.0.0
@@ -641,6 +870,18 @@ packages:
- functions
- prelude
- unsafe-coerce
record-extra:
type: registry
version: 5.0.1
integrity: sha256-7vnREK2fpGJ7exswSeA9UpZFuU+UXRt3SA7AFUldT/Y=
dependencies:
- arrays
- functions
- lists
- prelude
- record
- tuples
- typelevel-prelude
refs:
type: registry
version: 6.0.0
@@ -654,6 +895,52 @@ packages:
integrity: sha256-a1ibQkiUcbODbLE/WAq7Ttbbh9ex+x33VCQ7GngKudU=
dependencies:
- unsafe-coerce
simple-json:
type: registry
version: 9.0.0
integrity: sha256-K3RJaThqsszTd+TEklzZmAdDqvIHWgXIfKqlsoykU1c=
dependencies:
- arrays
- exceptions
- foreign
- foreign-object
- nullable
- prelude
- record
- typelevel-prelude
- variant
spec:
type: registry
version: 7.6.0
integrity: sha256-+merGdQbL9zWONbnt8S8J9afGJ59MQqGtS0qSd3yu4I=
dependencies:
- aff
- ansi
- arrays
- avar
- bifunctors
- control
- datetime
- effect
- either
- exceptions
- foldable-traversable
- fork
- identity
- integers
- lists
- maybe
- newtype
- now
- ordered-collections
- parallel
- pipes
- prelude
- refs
- strings
- tailrec
- transformers
- tuples
st:
type: registry
version: 6.2.0
@@ -754,8 +1041,36 @@ packages:
- foldable-traversable
- maybe
- strings
unordered-collections:
type: registry
version: 3.1.0
integrity: sha256-H2eQR+ylI+cljz4XzWfEbdF7ee+pnw2IZCeq69AuJ+Q=
dependencies:
- arrays
- enums
- functions
- integers
- lists
- prelude
- record
- tuples
- typelevel-prelude
- unfoldable
unsafe-coerce:
type: registry
version: 6.0.0
integrity: sha256-IqIYW4Vkevn8sI+6aUwRGvd87tVL36BBeOr0cGAE7t0=
dependencies: []
variant:
type: registry
version: 8.0.0
integrity: sha256-SR//zQDg2dnbB8ZHslcxieUkCeNlbMToapvmh9onTtw=
dependencies:
- enums
- lists
- maybe
- partial
- prelude
- record
- tuples
- unsafe-coerce

View File

@@ -1,7 +1,7 @@
package:
name: csv-stream
publish:
version: '1.0.2'
version: '2.0.0'
license: 'GPL-3.0-or-later'
location:
githubOwner: 'cakekindel'
@@ -10,12 +10,12 @@ package:
strict: true
pedanticPackages: true
dependencies:
- node-stream-pipes: ">=1.3.0 <2.0.0"
- aff: ">=7.1.0 <8.0.0"
- arrays: ">=7.3.0 <8.0.0"
- bifunctors: ">=6.0.0 <7.0.0"
- datetime: ">=6.1.0 <7.0.0"
- effect: ">=4.0.0 <5.0.0"
- either: ">=6.1.0 <7.0.0"
- exceptions: ">=6.0.0 <7.0.0"
- foldable-traversable: ">=6.0.0 <7.0.0"
- foreign: ">=7.0.0 <8.0.0"
@@ -29,18 +29,28 @@ package:
- node-streams: ">=9.0.0 <10.0.0"
- nullable: ">=6.0.0 <7.0.0"
- numbers: ">=9.0.1 <10.0.0"
- ordered-collections: ">=3.2.0 <4.0.0"
- pipes: ">=8.0.0 <9.0.0"
- precise-datetime: ">=7.0.0 <8.0.0"
- prelude: ">=6.0.1 <7.0.0"
- record: ">=4.0.0 <5.0.0"
- record-extra: ">=5.0.1 <6.0.0"
- st: ">=6.2.0 <7.0.0"
- strings: ">=6.0.1 <7.0.0"
- tailrec: ">=6.1.0 <7.0.0"
- transformers: ">=6.0.0 <7.0.0"
- tuples: ">=7.0.0 <8.0.0"
- typelevel-prelude: ">=7.0.0 <8.0.0"
- unsafe-coerce: ">=6.0.0 <7.0.0"
test:
main: Test.Main
dependencies:
- console
- gen
- node-fs
- node-zlib
- quickcheck
- simple-json
- spec
workspace:
extraPackages: {}

View File

@@ -7,8 +7,10 @@ import Control.Monad.Except (Except)
import Data.Array as Array
import Data.CSV (class ReadCSV, class WriteCSV, readCSV, writeCSV)
import Data.List.NonEmpty (NonEmptyList)
import Data.Map (Map)
import Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Symbol (class IsSymbol)
import Data.Symbol (class IsSymbol, reflectSymbol)
import Foreign (ForeignError(..))
import Prim.Row (class Cons, class Lacks)
import Prim.RowList (class RowToList, Cons, Nil, RowList)
@@ -20,25 +22,29 @@ class RowToList r rl <= WriteCSVRecord r rl | rl -> r where
writeCSVRecord :: { | r } -> Array String
instance (RowToList r (Cons k v tailrl), IsSymbol k, WriteCSV v, Lacks k tail, Cons k v tail r, WriteCSVRecord tail tailrl) => WriteCSVRecord r (Cons k v tailrl) where
writeCSVRecord r = let
val = writeCSV $ Record.get (Proxy @k) r
tail = writeCSVRecord @tail @tailrl $ Record.delete (Proxy @k) r
in
[val] <> tail
writeCSVRecord r =
let
val = writeCSV $ Record.get (Proxy @k) r
tail = writeCSVRecord @tail @tailrl $ Record.delete (Proxy @k) r
in
[ val ] <> tail
instance WriteCSVRecord () Nil where
writeCSVRecord _ = []
class ReadCSVRecord :: Row Type -> RowList Type -> Constraint
class RowToList r rl <= ReadCSVRecord r rl | rl -> r where
readCSVRecord :: Array String -> Except (NonEmptyList ForeignError) { | r }
readCSVRecord :: Map String Int -> Array String -> Except (NonEmptyList ForeignError) { | r }
instance (RowToList r (Cons k v tailrl), IsSymbol k, ReadCSV v, Lacks k tail, Cons k v tail r, ReadCSVRecord tail tailrl) => ReadCSVRecord r (Cons k v tailrl) where
readCSVRecord vals = do
valraw <- liftMaybe (pure $ ForeignError "unexpected end of record") $ Array.head vals
readCSVRecord cols vals = do
let
k = reflectSymbol (Proxy @k)
pos <- liftMaybe (pure $ ForeignError $ "row too long; did not expect value " <> k) $ Map.lookup k cols
let valraw = fromMaybe "" $ Array.index vals pos
val <- readCSV @v valraw
tail <- readCSVRecord @tail @tailrl (fromMaybe [] $ Array.tail vals)
tail <- readCSVRecord @tail @tailrl cols vals
pure $ Record.insert (Proxy @k) val tail
instance ReadCSVRecord () Nil where
readCSVRecord _ = pure {}
readCSVRecord _ _ = pure {}

View File

@@ -9,11 +9,12 @@ import Data.Int as Int
import Data.List.NonEmpty (NonEmptyList)
import Data.Maybe (Maybe(..), maybe)
import Data.Newtype (unwrap)
import Data.Number (fromString) as Number
import Data.Number.Format (toString) as Number
import Data.PreciseDateTime (fromDateTime, fromRFC3339String, toDateTimeLossy, toRFC3339String)
import Data.RFC3339String (RFC3339String(..))
import Data.String as String
import Foreign (ForeignError(..), readInt, readNumber, unsafeToForeign)
import Foreign (ForeignError(..))
class ReadCSV a where
readCSV :: String -> Except (NonEmptyList ForeignError) a
@@ -22,10 +23,10 @@ class WriteCSV a where
writeCSV :: a -> String
instance ReadCSV Int where
readCSV = readInt <<< unsafeToForeign
readCSV s = liftMaybe (pure $ ForeignError $ "invalid integer: " <> s) $ Int.fromString s
instance ReadCSV Number where
readCSV = readNumber <<< unsafeToForeign
readCSV s = liftMaybe (pure $ ForeignError $ "invalid number: " <> s) $ Number.fromString s
instance ReadCSV String where
readCSV = pure

View File

@@ -1,7 +1,7 @@
import {parse} from 'csv-parse'
import { Parser } from "csv-parse";
/** @type {(s: import('csv-parse').Options) => () => import('csv-parse').Parser} */
export const makeImpl = c => () => parse(c)
/** @type {(s: import('csv-parse').Options) => () => Parser} */
export const makeImpl = (c) => () => new Parser(c);
/** @type {(s: import('stream').Duplex) => () => string[] | null} */
export const readImpl = s => () => s.read();
/** @type {(s: Parser) => () => Array<string> | null} */
export const readImpl = (p) => () => p.read();

View File

@@ -1,36 +1,19 @@
module Node.Stream.CSV.Parse where
import Prelude
import Prelude hiding (join)
import Control.Monad.Error.Class (liftEither)
import Control.Monad.Except (runExcept)
import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT)
import Control.Monad.Rec.Class (whileJust)
import Control.Monad.ST.Global as ST
import Data.Array.ST as Array.ST
import Data.Bifunctor (lmap)
import Data.CSV.Record (class ReadCSVRecord, readCSVRecord)
import Data.Either (Either(..))
import Data.Maybe (Maybe(..))
import Data.Nullable (Nullable)
import Data.Nullable as Nullable
import Data.Traversable (for_)
import Effect (Effect)
import Effect.Aff (Aff, makeAff)
import Effect.Class (liftEffect)
import Effect.Exception (error)
import Effect.Uncurried (mkEffectFn1)
import Foreign (Foreign, unsafeToForeign)
import Foreign.Object (Object)
import Foreign.Object as Object
import Node.Encoding (Encoding(..))
import Foreign.Object (union) as Object
import Node.Buffer (Buffer)
import Node.EventEmitter (EventHandle(..))
import Node.EventEmitter as Event
import Node.EventEmitter.UtilTypes (EventHandle1)
import Node.Stream (Read, Stream, Write)
import Node.Stream as Stream
import Node.Stream.Object (Transform) as Object
import Prim.Row (class Union)
import Prim.RowList (class RowToList)
import Unsafe.Coerce (unsafeCoerce)
data CSVRead
@@ -39,12 +22,9 @@ data CSVRead
-- | into parsed purescript objects.
-- |
-- | The CSV contents may be piped into this stream
-- | as Buffer or String encoded chunks.
-- |
-- | Records can be read with `read` when `Node.Stream.readable`
-- | is true.
type CSVParser :: Row Type -> Row Type -> Type
type CSVParser a r = Stream (read :: Read, write :: Write, csv :: CSVRead | r)
-- | as Buffer or String chunks.
type CSVParser :: Row Type -> Type
type CSVParser r = Stream (read :: Read, write :: Write, csv :: CSVRead | r)
-- | https://csv.js.org/parse/options/
type Config r =
@@ -76,47 +56,14 @@ type Config r =
)
-- | Create a CSVParser
make :: forall @r rl @config @missing @extra. RowToList r rl => ReadCSVRecord r rl => Union config missing (Config extra) => { | config } -> Effect (CSVParser r ())
make = makeImpl <<< unsafeToForeign <<< Object.union (recordToForeign {columns: true, cast: false, cast_date: false}) <<< recordToForeign
make :: forall @config @missing @extra. Union config missing (Config extra) => { | config } -> Effect (CSVParser ())
make = makeImpl <<< unsafeToForeign <<< Object.union (recordToForeign { columns: false, cast: false, cast_date: false }) <<< recordToForeign
-- | Synchronously parse a CSV string
parse :: forall @r rl @config missing extra. RowToList r rl => ReadCSVRecord r rl => Union config missing (Config extra) => { | config } -> String -> Aff (Array { | r })
parse config csv = do
stream <- liftEffect $ make @r @config @missing @extra config
void $ liftEffect $ Stream.writeString stream UTF8 csv
liftEffect $ Stream.end stream
readAll stream
-- | Reads a parsed record from the stream.
-- |
-- | Returns `Nothing` when either:
-- | - The internal buffer of parsed records has been exhausted, but there will be more (`Node.Stream.readable` and `Node.Stream.closed` are both `false`)
-- | - All records have been processed (`Node.Stream.closed` is `true`)
read :: forall @r rl a. RowToList r rl => ReadCSVRecord r rl => CSVParser r a -> Effect (Maybe { | r })
read stream = runMaybeT do
raw :: Array String <- MaybeT $ Nullable.toMaybe <$> readImpl stream
liftEither $ lmap (error <<< show) $ runExcept $ readCSVRecord @r @rl raw
-- | Collect all parsed records into an array
readAll :: forall @r rl a. RowToList r rl => ReadCSVRecord r rl => CSVParser r a -> Aff (Array { | r })
readAll stream = do
records <- liftEffect $ ST.toEffect $ Array.ST.new
whileJust do
isReadable <- liftEffect $ Stream.readable stream
when (not isReadable) $ makeAff \res -> mempty <* flip (Event.once Stream.readableH) stream $ res $ Right unit
liftEffect $ whileJust do
r <- read @r stream
for_ r \r' -> ST.toEffect $ Array.ST.push r' records
pure $ void r
isClosed <- liftEffect $ Stream.closed stream
pure $ if isClosed then Nothing else Just unit
liftEffect $ ST.toEffect $ Array.ST.unsafeFreeze records
toObjectStream :: CSVParser () -> Object.Transform Buffer (Array String)
toObjectStream = unsafeCoerce
-- | `data` event. Emitted when a CSV record has been parsed.
dataH :: forall r a. EventHandle1 (CSVParser r a) { | r }
dataH :: forall a. EventHandle1 (CSVParser a) (Array String)
dataH = EventHandle "data" mkEffectFn1
-- | FFI

View File

@@ -1,7 +1,7 @@
import {stringify} from 'csv-stringify'
import { stringify } from "csv-stringify";
/** @type {(c: import('csv-stringify').Options) => () => import('csv-stringify').Stringifier} */
export const makeImpl = c => () => stringify(c)
export const makeImpl = (c) => () => stringify(c);
/** @type {(s: import('csv-stringify').Stringifier) => (vals: Array<string>) => () => void} */
export const writeImpl = s => vals => () => s.write(vals)
export const writeImpl = (s) => (vals) => () => s.write(vals);

View File

@@ -2,24 +2,14 @@ module Node.Stream.CSV.Stringify where
import Prelude
import Control.Monad.Rec.Class (whileJust)
import Control.Monad.ST.Global as ST
import Data.Array.ST as Array.ST
import Data.CSV.Record (class WriteCSVRecord, writeCSVRecord)
import Data.Either (Either(..), blush)
import Data.Foldable (class Foldable, fold)
import Data.Maybe (Maybe(..))
import Data.String.Regex (Regex)
import Data.Traversable (for_)
import Effect (Effect)
import Effect.Aff (Aff, makeAff)
import Effect.Class (liftEffect)
import Foreign (Foreign, unsafeToForeign)
import Foreign.Object (Object)
import Foreign.Object as Object
import Node.EventEmitter as Event
import Foreign.Object (union) as Object
import Node.Stream (Read, Stream, Write)
import Node.Stream as Stream
import Node.Stream.Object (Transform) as Object
import Prim.Row (class Union)
import Prim.RowList (class RowToList)
import Unsafe.Coerce (unsafeCoerce)
@@ -34,18 +24,16 @@ data CSVWrite
-- | Stringified rows are emitted on the `Readable` end as string
-- | chunks, meaning it can be treated as a `Node.Stream.Readable`
-- | that has had `setEncoding UTF8` invoked on it.
type CSVStringifier :: Row Type -> Row Type -> Type
type CSVStringifier a r = Stream (read :: Read, write :: Write, csv :: CSVWrite | r)
type CSVStringifier :: Row Type -> Type
type CSVStringifier r = Stream (read :: Read, write :: Write, csv :: CSVWrite | r)
-- | https://csv.js.org/stringify/options/
type Config r =
( bom :: Boolean
, group_columns_by_name :: Boolean
, delimiter :: String
, record_delimiter :: String
, escape :: String
, escape_formulas :: Boolean
, header :: Boolean
, quote :: String
, quoted :: Boolean
, quoted_empty :: Boolean
@@ -60,41 +48,36 @@ foreign import writeImpl :: forall r. Stream r -> Array String -> Effect Unit
recordToForeign :: forall r. Record r -> Object Foreign
recordToForeign = unsafeCoerce
-- | Create a CSVStringifier
make :: forall @r rl @config @missing @extra. RowToList r rl => WriteCSVRecord r rl => Union config missing (Config extra) => { | config } -> Effect (CSVStringifier r ())
make = makeImpl <<< unsafeToForeign <<< Object.union (recordToForeign {columns: true, cast: false, cast_date: false}) <<< recordToForeign
-- | Create a raw Transform stream that accepts chunks of `Array String`,
-- | and transforms them into string CSV rows.
-- |
-- | Requires an ordered array of column names.
make
:: forall @config @missing @extra
. Union config missing (Config extra)
=> Array String
-> { | config }
-> Effect (CSVStringifier ())
make columns =
makeImpl
<<< unsafeToForeign
<<< Object.union (recordToForeign { columns, header: true })
<<< recordToForeign
-- | Synchronously stringify a collection of records
stringify :: forall @r rl f @config missing extra. Foldable f => RowToList r rl => WriteCSVRecord r rl => Union config missing (Config extra) => { | config } -> f { | r } -> Aff String
stringify config records = do
stream <- liftEffect $ make @r @config @missing @extra config
liftEffect $ for_ records \r -> write stream r
liftEffect $ Stream.end stream
readAll stream
-- | Convert the raw stream to a typed ObjectStream
toObjectStream :: CSVStringifier () -> Object.Transform (Array String) String
toObjectStream = unsafeCoerce
-- | Write a record to a CSVStringifier.
-- |
-- | The record will be emitted on the `Readable` end
-- | of the stream as a string chunk.
write :: forall @r rl a. RowToList r rl => WriteCSVRecord r rl => CSVStringifier r a -> { | r } -> Effect Unit
write :: forall @r rl a. RowToList r rl => WriteCSVRecord r rl => CSVStringifier a -> { | r } -> Effect Unit
write s = writeImpl s <<< writeCSVRecord @r @rl
-- | Read the stringified chunks until end-of-stream, returning the entire CSV string.
readAll :: forall r a. CSVStringifier r a -> Aff String
readAll stream = do
chunks <- liftEffect $ ST.toEffect $ Array.ST.new
whileJust do
isReadable <- liftEffect $ Stream.readable stream
when (not isReadable) $ makeAff \res -> mempty <* flip (Event.on Stream.readableH) stream $ res $ Right unit
liftEffect $ whileJust do
s <- (join <<< map blush) <$> Stream.readEither stream
for_ s \s' -> ST.toEffect $ Array.ST.push s' chunks
pure $ void s
isClosed <- liftEffect $ Stream.closed stream
pure $ if isClosed then Nothing else Just unit
chunks' <- liftEffect $ ST.toEffect $ Array.ST.unsafeFreeze chunks
pure $ fold chunks'
-- | Write a record to a CSVStringifier.
-- |
-- | The record will be emitted on the `Readable` end
-- | of the stream as a string chunk.
writeRaw :: forall a. CSVStringifier a -> Array String -> Effect Unit
writeRaw = writeImpl

View File

@@ -1,2 +1 @@
module Node.Stream.CSV where

98
src/Pipes.CSV.purs Normal file
View File

@@ -0,0 +1,98 @@
module Pipes.CSV where
import Prelude
import Control.Monad.Error.Class (liftEither)
import Control.Monad.Except (runExcept)
import Control.Monad.Rec.Class (forever)
import Control.Monad.ST.Global as ST
import Control.Monad.ST.Ref as STRef
import Data.Array as Array
import Data.Bifunctor (lmap)
import Data.CSV.Record (class ReadCSVRecord, class WriteCSVRecord, readCSVRecord, writeCSVRecord)
import Data.FunctorWithIndex (mapWithIndex)
import Data.Map as Map
import Data.Maybe (Maybe(..))
import Data.Tuple.Nested ((/\))
import Effect.Aff (Aff)
import Effect.Class (liftEffect)
import Effect.Exception (error)
import Node.Buffer (Buffer)
import Node.Stream.CSV.Parse as CSV.Parse
import Node.Stream.CSV.Stringify as CSV.Stringify
import Pipes (await, yield, (>->))
import Pipes.Core (Pipe)
import Pipes.Node.Stream as Pipes.Stream
import Prim.RowList (class RowToList)
import Record.Extra (class Keys, keys)
import Type.Prelude (Proxy(..))
-- | Transforms buffer chunks of a CSV file to parsed
-- | records of `r`.
-- |
-- | The first row is consumed as the header and used to map
-- | column names to positions; every subsequent row is decoded
-- | into a record of `r`, failing the pipe on a malformed row.
-- |
-- | ```
-- | -- == my-data.csv.gz ==
-- | -- id,foo,is_deleted
-- | -- 1,hi,f
-- | -- 2,bye,t
-- |
-- | rows
-- |   :: Array {id :: Int, foo :: String, is_deleted :: Boolean}
-- |   <- map Array.fromFoldable
-- |      $ Pipes.toListM
-- |      $ Pipes.Node.Stream.unEOS
-- |      $ Pipes.Node.FS.read "my-data.csv.gz"
-- |        >-> Pipes.Node.Zlib.gunzip
-- |        >-> Pipes.CSV.parse
-- | rows `shouldEqual` [{id: 1, foo: "hi", is_deleted: false}, {id: 2, foo: "bye", is_deleted: true}]
-- | ```
parse
  :: forall @r rl
   . RowToList r rl
  => ReadCSVRecord r rl
  => Pipe (Maybe Buffer) (Maybe { | r }) Aff Unit
parse = do
  parser <- liftEffect $ CSV.Parse.make {}
  -- Holds `Just (column name -> index)` once the header row has been seen.
  columnsRef <- liftEffect $ ST.toEffect $ STRef.new Nothing
  let
    getColumns = liftEffect $ ST.toEffect $ STRef.read columnsRef
    setColumns cols = void $ liftEffect $ ST.toEffect $ STRef.write (Just cols) columnsRef
    -- Turn the header row into a name -> position map.
    rememberHeader names = setColumns $ Map.fromFoldable $ mapWithIndex (flip (/\)) names
    -- Decode one data row against the known columns; a decode failure
    -- is rethrown as an `Error` in `Aff`.
    decode cols fields = liftEither $ lmap (error <<< show) $ runExcept $ readCSVRecord @r @rl cols fields
    unmarshal = forever do
      fields <- await
      maybeCols <- getColumns
      case maybeCols of
        Nothing -> rememberHeader fields
        Just cols -> yield =<< decode cols fields
  Pipes.Stream.fromTransform (CSV.Parse.toObjectStream parser) >-> Pipes.Stream.inEOS unmarshal
-- | Transforms buffer chunks of a CSV file to parsed
-- | arrays of CSV values, without any record decoding.
parseRaw :: Pipe (Maybe Buffer) (Maybe (Array String)) Aff Unit
parseRaw = do
  parser <- liftEffect $ CSV.Parse.make {}
  Pipes.Stream.fromTransform $ CSV.Parse.toObjectStream parser
-- | Transforms arrays of CSV values into stringified CSV rows
-- | using the given ordered array of column names.
stringifyRaw :: Array String -> Pipe (Maybe (Array String)) (Maybe String) Aff Unit
stringifyRaw columns = do
  stringifier <- liftEffect $ CSV.Stringify.make columns {}
  Pipes.Stream.fromTransform $ CSV.Stringify.toObjectStream stringifier
-- | Transforms purescript records into stringified CSV records.
-- |
-- | Columns are inferred from the record's keys, ordered alphabetically.
stringify :: forall r rl. WriteCSVRecord r rl => RowToList r rl => Keys rl => Pipe (Maybe { | r }) (Maybe String) Aff Unit
stringify = do
  stringifier <- liftEffect $ CSV.Stringify.make (Array.fromFoldable $ keys $ Proxy @r) {}
  let
    -- Serialize each awaited record into a raw row for the stringifier.
    serialize = forever do
      record <- await
      yield $ writeCSVRecord @r @rl record
  Pipes.Stream.inEOS serialize >-> Pipes.Stream.fromTransform (CSV.Stringify.toObjectStream stringifier)

View File

@@ -2,11 +2,13 @@ module Test.Main where
import Prelude
import Data.Maybe (Maybe(..))
import Effect (Effect)
import Effect.Class.Console (log)
import Effect.Aff (launchAff_)
import Test.Pipes.CSV as Test.Pipes.CSV
import Test.Spec.Reporter (specReporter)
import Test.Spec.Runner (defaultConfig, runSpec')
main :: Effect Unit
main = do
log "🍕"
log "You should add some tests."
main = launchAff_ $ runSpec' (defaultConfig { failFast = true, timeout = Nothing }) [ specReporter ] do
Test.Pipes.CSV.spec

88
test/Test/Pipes.CSV.purs Normal file
View File

@@ -0,0 +1,88 @@
module Test.Pipes.CSV where
import Prelude
import Control.Monad.Gen (chooseInt)
import Control.Monad.Rec.Class (Step(..), tailRecM)
import Data.Array as Array
import Data.DateTime (DateTime)
import Data.Foldable (fold, sum)
import Data.Maybe (Maybe(..), fromJust)
import Data.Newtype (wrap)
import Data.PreciseDateTime (fromRFC3339String, toDateTimeLossy)
import Data.String.CodePoints as String.CodePoints
import Data.Tuple.Nested ((/\))
import Effect.Class (liftEffect)
import Effect.Console (log)
import Node.Encoding (Encoding(..))
import Partial.Unsafe (unsafePartial)
import Pipes (yield, (>->))
import Pipes.CSV as Pipes.CSV
import Pipes.Collect as Pipes.Collect
import Pipes.Construct as Pipes.Construct
import Pipes.Node.Buffer as Pipes.Buffer
import Pipes.Node.Stream as Pipes.Stream
import Pipes.Prelude (chain, map, toListM) as Pipes
import Pipes.Util as Pipes.Util
import Test.QuickCheck.Gen (randomSample')
import Test.Spec (Spec, before, describe, it)
import Test.Spec.Assertions (shouldEqual)
-- | Fixture: a small CSV document with an alphabetically-ordered
-- | header row (`created,flag,foo,id`) and three data rows; expected
-- | output of the `stringify` test and input to the `parse` test.
csv :: String
csv =
  """created,flag,foo,id
2020-01-01T00:00:00.0Z,true,a,1
2024-02-02T08:00:00.0Z,false,apple,2
1970-01-01T00:00:00.0Z,true,hello,3
"""
-- | Parse an RFC 3339 timestamp string into a `DateTime`.
-- | Partial in effect: crashes if the input is not valid RFC 3339.
dt :: String -> DateTime
dt s = toDateTimeLossy $ unsafePartial $ fromJust $ fromRFC3339String $ wrap s
spec :: Spec Unit
spec =
describe "Pipes.CSV" do
it "stringify" do
let
objs =
[ { id: 1, foo: "a", flag: true, created: dt "2020-01-01T00:00:00Z" }
, { id: 2, foo: "apple", flag: false, created: dt "2024-02-02T08:00:00Z" }
, { id: 3, foo: "hello", flag: true, created: dt "1970-01-01T00:00:00Z" }
]
csv' <- map fold $ Pipes.Collect.toArray $ Pipes.Stream.withEOS (Pipes.Construct.eachArray objs) >-> Pipes.CSV.stringify >-> Pipes.Stream.unEOS
csv' `shouldEqual` csv
describe "parse" do
it "parses csv" do
rows <- map Array.fromFoldable
$ Pipes.toListM
$ Pipes.Stream.withEOS (yield csv)
>-> Pipes.Stream.inEOS (Pipes.Buffer.fromString UTF8)
>-> Pipes.CSV.parse
>-> Pipes.Stream.unEOS
rows `shouldEqual`
[ { id: 1, foo: "a", flag: true, created: dt "2020-01-01T00:00:00Z" }
, { id: 2, foo: "apple", flag: false, created: dt "2024-02-02T08:00:00Z" }
, { id: 3, foo: "hello", flag: true, created: dt "1970-01-01T00:00:00Z" }
]
before
(do
nums <- liftEffect $ randomSample' 100000 (chooseInt 0 9)
let
chars = [ "i","d","\n" ] <> join ((\n -> [show n, "\n"]) <$> nums)
bufs <- Pipes.Collect.toArray
$ Pipes.Stream.withEOS (Pipes.Construct.eachArray chars)
>-> Pipes.Util.chunked 1000
>-> Pipes.Stream.inEOS (Pipes.map fold >-> Pipes.Buffer.fromString UTF8)
>-> Pipes.Stream.unEOS
pure $ nums /\ bufs
)
$ it "parses large csv" \(nums /\ bufs) -> do
rows <-
Pipes.Collect.toArray
$ Pipes.Stream.withEOS (Pipes.Construct.eachArray bufs)
>-> Pipes.CSV.parse @(id :: Int)
>-> Pipes.Stream.unEOS
rows `shouldEqual` ((\id -> { id }) <$> nums)