/* global Zinnia */

- import Spark from '../lib/spark.js'
+ import Spark, { newStats } from '../lib/spark.js'
import { test } from 'zinnia:test'
import { assertInstanceOf, assertEquals, assertArrayIncludes } from 'zinnia:assert'
import { SPARK_VERSION } from '../lib/constants.js'
@@ -47,24 +47,15 @@ test('getRetrieval', async () => {
  }])
})

- // TODO: test more cases
- test('fetchCAR', async () => {
+ test('fetchCAR - http', async () => {
  const requests = []
-   const mockedFetch = async url => {
-     requests.push(url.toString())
-     return fetch(url)
-   }
-   const spark = new Spark({ fetch: mockedFetch })
-   const stats = {
-     timeout: false,
-     startAt: new Date(),
-     firstByteAt: null,
-     endAt: null,
-     carTooLarge: false,
-     byteLength: 0,
-     carChecksum: null,
-     statusCode: null
-   }
+   const spark = new Spark({
+     fetch: async (url) => {
+       requests.push(url.toString())
+       return fetch(url)
+     }
+   })
+   const stats = newStats()
  await spark.fetchCAR('http', '/dns/frisbii.fly.dev/tcp/443/https', KNOWN_CID, stats)
  assertEquals(stats.statusCode, 200, 'stats.statusCode')
  assertEquals(stats.timeout, false, 'stats.timeout')
@@ -77,6 +68,35 @@ test('fetchCAR', async () => {
  assertEquals(requests, [`https://frisbii.fly.dev/ipfs/${KNOWN_CID}?dag-scope=block`])
})
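// Editor's note: newStats() comes from ../lib/spark.js, which is not shown in this diff.
// Judging by the inline stats object it replaces above, it presumably returns a fresh
// stats record with the same default fields. A minimal sketch of that helper, as an
// assumption rather than the actual implementation:

export function newStats () {
  return {
    timeout: false,
    startAt: new Date(),
    firstByteAt: null,
    endAt: null,
    carTooLarge: false,
    byteLength: 0,
    carChecksum: null,
    statusCode: null
  }
}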

+ test('fetchCAR - graphsync', async () => {
+   // This test relies on data stored as part of a Filecoin deal which will eventually expire.
+   // Also the storage provider may decide to stop serving Graphsync retrievals.
+   // When that happens, this test will start failing, and we will need to find different
+   // content that can be retrieved over Graphsync.
+   // Hopefully, we will no longer support Graphsync by that time.
+   const cid = 'bafybeiepi56qxfcwqgpstg25r6sonig7y3pzd37lwambzmlcmbnujjri4a'
+   const addr = '/dns/f010479.twinquasar.io/tcp/42002/p2p/12D3KooWHKeaNCnYByQUMS2n5PAZ1KZ9xKXqsb4bhpxVJ6bBJg5V'
+
+   const requests = []
+   const spark = new Spark({
+     fetch: async (url) => {
+       requests.push(url.toString())
+       return fetch(url)
+     }
+   })
+   const stats = newStats()
+   await spark.fetchCAR('graphsync', addr, cid, stats)
+   assertEquals(stats.statusCode, 200, 'stats.statusCode')
+   assertEquals(stats.timeout, false, 'stats.timeout')
+   assertInstanceOf(stats.startAt, Date)
+   assertInstanceOf(stats.firstByteAt, Date)
+   assertInstanceOf(stats.endAt, Date)
+   assertEquals(stats.carTooLarge, false, 'stats.carTooLarge')
+   assertEquals(stats.byteLength, 217, 'stats.byteLength')
+   assertEquals(stats.carChecksum, '1220a8d765159d8829f2bca7df05e5cd46eb88bdaa30905d3d08c6295562ea072f0f', 'stats.carChecksum')
+   assertEquals(requests, [`ipfs://${cid}?dag-scope=block&protocols=graphsync&providers=${encodeURIComponent(addr)}`])
+ })
+
/* Disabled as long as we are fetching the top-level block only
test('fetchCAR exceeding MAX_CAR_SIZE', async () => {
  const fetch = async url => {
@@ -91,13 +111,7 @@ test('fetchCAR exceeding MAX_CAR_SIZE', async () => {
    }
  }
  const spark = new Spark({ fetch })
-   const stats = {
-     timeout: false,
-     carTooLarge: false,
-     byteLength: 0,
-     carChecksum: null,
-     statusCode: null
-   }
+   const stats = newStats()
  await spark.fetchCAR('http', '/ip4/127.0.0.1/tcp/80/http', 'bafy', stats)
  assertEquals(stats.timeout, false)
  assertEquals(stats.carTooLarge, true)
@@ -107,6 +121,108 @@ test('fetchCAR exceeding MAX_CAR_SIZE', async () => {
})
*/

+ test('fetchCAR fails with statusCode=701 (unsupported host type)', async () => {
+   const spark = new Spark()
+   const stats = newStats()
+   await spark.fetchCAR('http', '/ip99/1.2.3.4.5/tcp/80/http', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 701, 'stats.statusCode')
+ })
+
+ test('fetchCAR fails with statusCode=702 (protocol is not tcp)', async () => {
+   const spark = new Spark()
+   const stats = newStats()
+   await spark.fetchCAR('http', '/ip4/1.2.3.4/udp/80/http', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 702, 'stats.statusCode')
+ })
+
+ test('fetchCAR fails with statusCode=703 (scheme is not http/https)', async () => {
+   const spark = new Spark()
+   const stats = newStats()
+   await spark.fetchCAR('http', '/ip4/1.2.3.4/tcp/80/ldap', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 703, 'stats.statusCode')
+ })
+
+ test('fetchCAR fails with statusCode=704 (multiaddr has too many parts)', async () => {
+   const spark = new Spark()
+   const stats = newStats()
+   await spark.fetchCAR('http', '/ip4/1.2.3.4/tcp/80/http/p2p/pubkey', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 704, 'stats.statusCode')
+ })
+
+ test('fetchCAR fails with statusCode=801 (DNS error)', async () => {
+   const spark = new Spark()
+   const stats = newStats()
+   await spark.fetchCAR('http', '/dns/invalid.example.com/tcp/80/http', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 801, 'stats.statusCode')
+ })
+
+ test('fetchCAR fails with statusCode=802 (TCP connection refused)', async () => {
+   const spark = new Spark()
+   const stats = newStats()
+   await spark.fetchCAR('http', '/ip4/127.0.0.1/tcp/79/http', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 802, 'stats.statusCode')
+ })
+
+ // TODO:
+ // statusCode=901 - unsupported hash algorithm
+
+ test('fetchCAR fails with statusCode=902 (hash mismatch)', async () => {
+   const spark = new Spark({
+     fetch: async (url) => {
+       const res = await fetch(url)
+       return {
+         status: res.status,
+         ok: res.ok,
+         body: (async function * () {
+           const bytes = new Uint8Array(await res.arrayBuffer())
+           // manipulate one byte inside the CAR block
+           bytes[bytes.length - 1] = bytes[bytes.length - 1] ^ 0x88
+           yield bytes
+         })()
+       }
+     }
+   })
+   const stats = newStats()
+   await spark.fetchCAR('http', '/dns/frisbii.fly.dev/tcp/443/https', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 902, 'stats.statusCode')
+ })
+
+ test('fetchCAR fails with statusCode=903 (unexpected CAR block)', async () => {
+   const spark = new Spark({
+     // Fetch the root block of a different CID
+     fetch: (_url) => fetch(
+       'https://frisbii.fly.dev/ipfs/bafkreih5zasorm4tlfga4ztwvm2dlnw6jxwwuvgnokyt3mjamfn3svvpyy?dag-scope=block'
+     )
+   })
+   const stats = newStats()
+   await spark.fetchCAR('http', '/ip4/127.0.0.1/tcp/80/http', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 903, 'stats.statusCode')
+ })
+
+ test('fetchCAR fails with statusCode=904 (cannot parse CAR)', async () => {
+   const spark = new Spark({
+     fetch: async (_url) => {
+       return {
+         status: 200,
+         ok: true,
+         body: (async function * () {
+           yield new Uint8Array([1, 2, 3])
+         })()
+       }
+     }
+   })
+   const stats = newStats()
+   await spark.fetchCAR('http', '/ip4/127.0.0.1/tcp/80/http', KNOWN_CID, stats)
+   assertEquals(stats.statusCode, 904, 'stats.statusCode')
+ })
+
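// Editor's note: the failure-path tests above stub fetch() with minimal Response-like
// objects exposing only status, ok, and an async-iterable body, which appears to be the
// subset of the Fetch Response interface that fetchCAR consumes. A hypothetical helper
// (not part of this commit) capturing that pattern might look like this:

const stubFetch = (bytes, status = 200) => async (_url) => ({
  status,
  ok: status >= 200 && status < 300,
  body: (async function * () { yield bytes })()
})

// Usage sketch: simulate an unparseable CAR, as in the statusCode=904 test above.
// const spark = new Spark({ fetch: stubFetch(new Uint8Array([1, 2, 3])) })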
test('submitRetrieval', async () => {
  const requests = []
  const fetch = async (url, allOpts) => {