Karim shoair committed on
Commit
67df82a
·
1 Parent(s): 0907267

test: adding new tests and updating existing ones

Browse files
tests/cli/__init__.py ADDED
File without changes
tests/cli/test_cli.py ADDED
@@ -0,0 +1,199 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+ from click.testing import CliRunner
3
+ from unittest.mock import patch, MagicMock
4
+ import pytest_httpbin
5
+
6
+ from scrapling.cli import (
7
+ install, shell, mcp,
8
+ get, post, put, delete, fetch, stealthy_fetch
9
+ )
10
+
11
+
12
@pytest_httpbin.use_class_based_httpbin
class TestCLI:
    """Test CLI functionality.

    Every command is driven through Click's `CliRunner`. The fetchers and the
    file writer (`Convertor.write_content_to_file`) are patched, so the tests
    produce no real network traffic and no disk output beyond `tmp_path`.
    """

    @pytest.fixture
    def html_url(self, httpbin):
        # URL of the local httpbin endpoint that serves a small HTML page.
        return f"{httpbin.url}/html"

    @pytest.fixture
    def runner(self):
        # A fresh Click test runner per test keeps invocations isolated.
        return CliRunner()

    def test_install_command(self, runner):
        """Test install command"""
        result = runner.invoke(install)
        assert result.exit_code == 0

    def test_shell_command(self, runner):
        """Test shell command"""
        # NOTE(review): patches `CustomShell` at its definition site; assumes
        # `scrapling.cli` imports it lazily from `scrapling.core.shell` — confirm.
        with patch('scrapling.core.shell.CustomShell') as mock_shell:
            mock_instance = MagicMock()
            mock_shell.return_value = mock_instance

            result = runner.invoke(shell)
            assert result.exit_code == 0
            mock_instance.start.assert_called_once()

    def test_mcp_command(self, runner):
        """Test MCP command"""
        with patch('scrapling.core.ai.ScraplingMCPServer') as mock_server:
            mock_instance = MagicMock()
            mock_server.return_value = mock_instance

            result = runner.invoke(mcp)
            assert result.exit_code == 0
            mock_instance.serve.assert_called_once()

    def test_extract_get_command(self, runner, tmp_path, html_url):
        """Test extract `get` command"""
        output_file = tmp_path / "output.md"

        with patch('scrapling.fetchers.Fetcher.get') as mock_get:
            mock_response = MagicMock()
            mock_response.status = 200
            mock_get.return_value = mock_response

            with patch('scrapling.cli.Convertor.write_content_to_file'):
                result = runner.invoke(
                    get,
                    [html_url, str(output_file)]
                )
                assert result.exit_code == 0

        # Test with various options
        # (reuses the `mock_response` created above for the second invocation)
        with patch('scrapling.fetchers.Fetcher.get') as mock_get:
            mock_get.return_value = mock_response

            with patch('scrapling.cli.Convertor.write_content_to_file'):
                result = runner.invoke(
                    get,
                    [
                        html_url,
                        str(output_file),
                        '-H', 'User-Agent: Test',
                        '--cookies', 'session=abc123',
                        '--timeout', '60',
                        '--proxy', 'http://proxy:8080',
                        '-s', '.content',
                        '-p', 'page=1'
                    ]
                )
                assert result.exit_code == 0

    def test_extract_post_command(self, runner, tmp_path, html_url):
        """Test extract `post` command"""
        output_file = tmp_path / "output.html"

        with patch('scrapling.fetchers.Fetcher.post') as mock_post:
            mock_response = MagicMock()
            mock_post.return_value = mock_response

            with patch('scrapling.cli.Convertor.write_content_to_file'):
                result = runner.invoke(
                    post,
                    [
                        html_url,
                        str(output_file),
                        '-d', 'key=value',
                        '-j', '{"data": "test"}'
                    ]
                )
                assert result.exit_code == 0

    def test_extract_put_command(self, runner, tmp_path, html_url):
        """Test extract `put` command"""
        output_file = tmp_path / "output.html"

        with patch('scrapling.fetchers.Fetcher.put') as mock_put:
            mock_response = MagicMock()
            mock_put.return_value = mock_response

            with patch('scrapling.cli.Convertor.write_content_to_file'):
                result = runner.invoke(
                    put,
                    [
                        html_url,
                        str(output_file),
                        '-d', 'key=value',
                        '-j', '{"data": "test"}'
                    ]
                )
                assert result.exit_code == 0

    def test_extract_delete_command(self, runner, tmp_path, html_url):
        """Test extract `delete` command"""
        output_file = tmp_path / "output.html"

        with patch('scrapling.fetchers.Fetcher.delete') as mock_delete:
            mock_response = MagicMock()
            mock_delete.return_value = mock_response

            with patch('scrapling.cli.Convertor.write_content_to_file'):
                result = runner.invoke(
                    delete,
                    [
                        html_url,
                        str(output_file)
                    ]
                )
                assert result.exit_code == 0

    def test_extract_fetch_command(self, runner, tmp_path, html_url):
        """Test extract `fetch` command (DynamicFetcher-backed)."""
        output_file = tmp_path / "output.txt"

        with patch('scrapling.fetchers.DynamicFetcher.fetch') as mock_fetch:
            mock_response = MagicMock()
            mock_fetch.return_value = mock_response

            with patch('scrapling.cli.Convertor.write_content_to_file'):
                result = runner.invoke(
                    fetch,
                    [
                        html_url,
                        str(output_file),
                        '--headless',
                        '--stealth',
                        '--timeout', '60000'
                    ]
                )
                assert result.exit_code == 0

    def test_extract_stealthy_fetch_command(self, runner, tmp_path, html_url):
        """Test extract `stealthy_fetch` command (StealthyFetcher-backed)."""
        output_file = tmp_path / "output.md"

        with patch('scrapling.fetchers.StealthyFetcher.fetch') as mock_fetch:
            mock_response = MagicMock()
            mock_fetch.return_value = mock_response

            with patch('scrapling.cli.Convertor.write_content_to_file'):
                result = runner.invoke(
                    stealthy_fetch,
                    [
                        html_url,
                        str(output_file),
                        '--headless',
                        '--css-selector', 'body',
                        '--timeout', '60000'
                    ]
                )
                assert result.exit_code == 0

    def test_invalid_arguments(self, runner, html_url):
        """Test invalid arguments handling"""
        # Missing required arguments
        result = runner.invoke(get)
        assert result.exit_code != 0

        # Invalid output file extension
        with patch('scrapling.cli.Convertor.write_content_to_file') as mock_write:
            mock_write.side_effect = ValueError("Unknown file type")

            _ = runner.invoke(
                get,
                [html_url, 'output.invalid']
            )
            # Should handle the error gracefully
            # (CliRunner catches the exception; no exit-code assertion is made)
tests/cli/test_shell_functionality.py ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+ from unittest.mock import patch, MagicMock
3
+
4
+ from scrapling.parser import Selector
5
+ from scrapling.core.shell import CustomShell, CurlParser, Convertor
6
+
7
+
8
class TestCurlParser:
    """Test curl command parsing.

    Each test feeds a curl command line to `CurlParser.parse` and checks the
    fields of the resulting request object (url, method, headers, cookies,
    data, json_data, proxy).
    """

    @pytest.fixture
    def parser(self):
        # One parser instance per test; parsing is stateless per call.
        return CurlParser()

    def test_basic_curl_parse(self, parser):
        """Test parsing basic curl commands"""
        # Simple GET
        curl_cmd = 'curl https://example.com'
        request = parser.parse(curl_cmd)

        assert request.url == 'https://example.com'
        assert request.method == 'get'  # method is normalized to lowercase
        assert request.data is None

    def test_curl_with_headers(self, parser):
        """Test parsing curl with headers"""
        curl_cmd = '''curl https://example.com \
        -H "User-Agent: Mozilla/5.0" \
        -H "Accept: application/json"'''

        request = parser.parse(curl_cmd)

        assert request.headers['User-Agent'] == 'Mozilla/5.0'
        assert request.headers['Accept'] == 'application/json'

    def test_curl_with_data(self, parser):
        """Test parsing curl with data"""
        # Form data
        curl_cmd = 'curl https://example.com -X POST -d "key=value&foo=bar"'
        request = parser.parse(curl_cmd)

        assert request.method == 'post'
        assert request.data == 'key=value&foo=bar'

        # JSON data (`--data-raw` with a JSON body ends up parsed in json_data)
        curl_cmd = """curl https://example.com -X POST --data-raw '{"key": "value"}'"""
        request = parser.parse(curl_cmd)

        assert request.json_data == {"key": "value"}

    def test_curl_with_cookies(self, parser):
        """Test parsing curl with cookies"""
        # Cookies from both a `Cookie:` header and the `-b` flag are merged.
        curl_cmd = '''curl https://example.com \
        -H "Cookie: session=abc123; user=john" \
        -b "extra=cookie"'''

        request = parser.parse(curl_cmd)

        assert request.cookies['session'] == 'abc123'
        assert request.cookies['user'] == 'john'
        assert request.cookies['extra'] == 'cookie'

    def test_curl_with_proxy(self, parser):
        """Test parsing curl with proxy"""
        # -x sets the proxy, -U supplies proxy credentials; both should be
        # folded into a single authenticated proxy URL.
        curl_cmd = 'curl https://example.com -x http://proxy:8080 -U user:pass'
        request = parser.parse(curl_cmd)

        assert 'http://user:pass@proxy:8080' in request.proxy['http']

    def test_curl2fetcher(self, parser):
        """Test converting curl to fetcher request"""
        with patch('scrapling.fetchers.Fetcher.get') as mock_get:
            mock_response = MagicMock()
            mock_get.return_value = mock_response

            curl_cmd = 'curl https://example.com'
            _ = parser.convert2fetcher(curl_cmd)

            # A plain curl command should translate into a single GET request.
            mock_get.assert_called_once()

    def test_invalid_curl_commands(self, parser):
        """Test handling invalid curl commands"""
        # Invalid format — the parser is expected to fail with AttributeError
        # rather than silently return a request object.
        with pytest.raises(AttributeError):
            parser.parse('not a curl command')
86
+
87
+
88
class TestConvertor:
    """Test content conversion functionality.

    Exercises `Convertor._extract_content` for each supported output format
    (markdown/html/text) and `Convertor.write_content_to_file` for the
    corresponding file extensions, plus the error paths.
    """

    @pytest.fixture
    def sample_html(self):
        # Minimal page with a heading and a paragraph inside a .content div,
        # enough to distinguish markdown/html/text output.
        return """
        <html>
            <body>
                <div class="content">
                    <h1>Title</h1>
                    <p>Some text content</p>
                </div>
            </body>
        </html>
        """

    def test_extract_markdown(self, sample_html):
        """Test extracting content as Markdown"""
        page = Selector(sample_html)
        content = list(Convertor._extract_content(page, "markdown"))

        assert len(content) > 0
        # An <h1> becomes a setext-style heading (underlined with '=').
        assert "Title\n=====" in content[0]  # Markdown conversion

    def test_extract_html(self, sample_html):
        """Test extracting content as HTML"""
        page = Selector(sample_html)
        content = list(Convertor._extract_content(page, "html"))

        assert len(content) > 0
        assert "<h1>Title</h1>" in content[0]

    def test_extract_text(self, sample_html):
        """Test extracting content as plain text"""
        page = Selector(sample_html)
        content = list(Convertor._extract_content(page, "text"))

        assert len(content) > 0
        assert "Title" in content[0]
        assert "Some text content" in content[0]

    def test_extract_with_selector(self, sample_html):
        """Test extracting with CSS selector"""
        page = Selector(sample_html)
        content = list(Convertor._extract_content(
            page,
            "text",
            css_selector=".content"
        ))

        assert len(content) > 0

    def test_write_to_file(self, sample_html, tmp_path):
        """Test writing content to files"""
        # The output format is inferred from the file extension.
        page = Selector(sample_html)

        # Test markdown
        md_file = tmp_path / "output.md"
        Convertor.write_content_to_file(page, str(md_file))
        assert md_file.exists()

        # Test HTML
        html_file = tmp_path / "output.html"
        Convertor.write_content_to_file(page, str(html_file))
        assert html_file.exists()

        # Test text
        txt_file = tmp_path / "output.txt"
        Convertor.write_content_to_file(page, str(txt_file))
        assert txt_file.exists()

    def test_invalid_operations(self, sample_html):
        """Test error handling in convertor"""
        page = Selector(sample_html)

        # Invalid extraction type
        with pytest.raises(ValueError):
            list(Convertor._extract_content(page, "invalid"))

        # Invalid filename
        with pytest.raises(ValueError):
            Convertor.write_content_to_file(page, "")

        # Unknown file extension
        with pytest.raises(ValueError):
            Convertor.write_content_to_file(page, "output.xyz")
+
175
+
176
class TestCustomShell:
    """Tests for the interactive shell wrapper."""

    def test_shell_initialization(self):
        """A freshly built shell uses DEBUG logging and starts with no pages."""
        # Stub out IPython's embedded shell so nothing interactive is launched.
        with patch('scrapling.core.shell.InteractiveShellEmbed'):
            custom_shell = CustomShell(code="", log_level="debug")

            assert custom_shell.log_level == 10  # numeric value of logging.DEBUG
            assert custom_shell.page is None
            assert len(custom_shell.pages) == 0

    def test_shell_namespace(self):
        """The shell namespace exposes the expected helpers and classes."""
        with patch('scrapling.core.shell.InteractiveShellEmbed'):
            custom_shell = CustomShell(code="")
            namespace = custom_shell.get_namespace()

            # Every public helper the shell advertises must be present.
            for expected_name in ('get', 'post', 'Fetcher', 'DynamicFetcher', 'view', 'uncurl'):
                assert expected_name in namespace
tests/fetchers/async/test_camoufox_session.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import pytest
3
+ import asyncio
4
+
5
+ import pytest_httpbin
6
+
7
+ from scrapling.engines import AsyncStealthySession
8
+
9
+
10
@pytest_httpbin.use_class_based_httpbin
@pytest.mark.asyncio
class TestAsyncStealthySession:
    """Test AsyncStealthySession"""

    # `AsyncStealthySession` inherits from `StealthySession`, so the shared
    # behavior is covered by the sync tests and not repeated here.
    @pytest.fixture
    def urls(self, httpbin):
        # Local httpbin endpoints used by the session tests.
        return {
            "basic": f"{httpbin.url}/get",
            "html": f"{httpbin.url}/html",
        }

    async def test_concurrent_async_requests(self, urls):
        """Test concurrent requests with async session"""
        async with AsyncStealthySession(max_pages=3) as session:
            # Launch multiple concurrent requests (coroutines created here,
            # awaited together below via gather).
            tasks = [
                session.fetch(urls["basic"]),
                session.fetch(urls["html"]),
                session.fetch(urls["basic"])
            ]

            assert session.max_pages == 3
            assert session.page_pool.max_pages == 3
            assert session.context is not None

            responses = await asyncio.gather(*tasks)

            # All should succeed
            assert all(r.status == 200 for r in responses)

            # Check pool stats — the pool must never exceed its configured cap.
            stats = session.get_pool_stats()
            assert stats["total_pages"] <= 3

        # After exit, should be closed
        assert session._closed is True

        # Should raise RuntimeError when used after closing
        with pytest.raises(RuntimeError):
            await session.fetch(urls["basic"])

    async def test_page_pool_management(self, urls):
        """Test page pool creation and reuse"""
        async with AsyncStealthySession() as session:
            # The first request creates a page
            _ = await session.fetch(urls["basic"])
            assert session.page_pool.pages_count == 1

            # The second request should reuse the page
            _ = await session.fetch(urls["html"])
            assert session.page_pool.pages_count == 1

            # Check pool stats
            # NOTE(review): assumes the session default is max_pages=1 — confirm.
            stats = session.get_pool_stats()
            assert stats["total_pages"] == 1
            assert stats["max_pages"] == 1

    async def test_stealthy_session_with_options(self, urls):
        """Test AsyncStealthySession with various options"""
        async with AsyncStealthySession(
            max_pages=1,
            block_images=True,
            disable_ads=True,
            humanize=True
        ) as session:
            response = await session.fetch(urls["html"])
            assert response.status == 200

    async def test_error_handling_in_fetch(self, urls):
        """Test error handling during fetch"""
        async with AsyncStealthySession() as session:
            # Test with invalid URL — the unknown scheme must surface an error
            # rather than hang or return a response.
            with pytest.raises(Exception):
                await session.fetch("invalid://url")
tests/fetchers/async/test_dynamic_session.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+ import asyncio
3
+
4
+ import pytest_httpbin
5
+
6
+ from scrapling.engines import AsyncDynamicSession
7
+
8
+
9
@pytest_httpbin.use_class_based_httpbin
@pytest.mark.asyncio
class TestAsyncDynamicSession:
    """Test AsyncDynamicSession"""

    # `AsyncDynamicSession` inherits from `DynamicSession`, so the shared
    # behavior is covered by the sync tests and not repeated here.
    @pytest.fixture
    def urls(self, httpbin):
        # Local httpbin endpoints used by the session tests.
        return {
            "basic": f"{httpbin.url}/get",
            "html": f"{httpbin.url}/html",
        }

    async def test_concurrent_async_requests(self, urls):
        """Test concurrent requests with async session"""
        async with AsyncDynamicSession(max_pages=3) as session:
            # Launch multiple concurrent requests (coroutines created here,
            # awaited together below via gather).
            tasks = [
                session.fetch(urls["basic"]),
                session.fetch(urls["html"]),
                session.fetch(urls["basic"])
            ]

            assert session.max_pages == 3
            assert session.page_pool.max_pages == 3
            assert session.context is not None

            responses = await asyncio.gather(*tasks)

            # All should succeed
            assert all(r.status == 200 for r in responses)

            # Check pool stats — the pool must never exceed its configured cap.
            stats = session.get_pool_stats()
            assert stats["total_pages"] <= 3

        # After exit, should be closed
        assert session._closed is True

        # Should raise RuntimeError when used after closing
        with pytest.raises(RuntimeError):
            await session.fetch(urls["basic"])

    async def test_page_pool_management(self, urls):
        """Test page pool creation and reuse"""
        async with AsyncDynamicSession() as session:
            # The first request creates a page
            _ = await session.fetch(urls["basic"])
            assert session.page_pool.pages_count == 1

            # The second request should reuse the page
            _ = await session.fetch(urls["html"])
            assert session.page_pool.pages_count == 1

            # Check pool stats
            # NOTE(review): assumes the session default is max_pages=1 — confirm.
            stats = session.get_pool_stats()
            assert stats["total_pages"] == 1
            assert stats["max_pages"] == 1

    async def test_dynamic_session_with_options(self, urls):
        """Test AsyncDynamicSession with various options"""
        # NOTE(review): headless=False launches a visible browser window —
        # confirm this is intentional for CI environments.
        async with AsyncDynamicSession(
            headless=False,
            stealth=True,
            disable_resources=True,
            extra_headers={"X-Test": "value"}
        ) as session:
            response = await session.fetch(urls["html"])
            assert response.status == 200

    async def test_error_handling_in_fetch(self, urls):
        """Test error handling during fetch"""
        async with AsyncDynamicSession() as session:
            # Test with invalid URL — the unknown scheme must surface an error
            # rather than hang or return a response.
            with pytest.raises(Exception):
                await session.fetch("invalid://url")
tests/fetchers/async/test_requests_session.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+
4
+ from scrapling.engines.static import AsyncFetcherClient
5
+
6
+
7
class TestFetcherSession:
    """Tests for the one-shot async fetcher client."""

    def test_async_fetcher_client_creation(self):
        """AsyncFetcherClient must not behave as an async context manager."""
        fetcher_client = AsyncFetcherClient()

        # The async context-manager protocol is deliberately disabled on the
        # one-shot client; both hooks are nulled out.
        assert fetcher_client.__aenter__ is None
        assert fetcher_client.__aexit__ is None
        # Internal marker that distinguishes the async client from sessions.
        assert fetcher_client._async_curl_session is True
tests/fetchers/sync/test_requests_session.py CHANGED
@@ -1,7 +1,7 @@
1
  import pytest
2
 
3
 
4
- from scrapling.engines.static import FetcherSession, FetcherClient, AsyncFetcherClient
5
 
6
 
7
  class TestFetcherSession:
@@ -45,12 +45,3 @@ class TestFetcherSession:
45
  assert client.__enter__ is None
46
  assert client.__exit__ is None
47
  assert client._curl_session is True # Special marker
48
-
49
- def test_async_fetcher_client_creation(self):
50
- """Test AsyncFetcherClient creation"""
51
- client = AsyncFetcherClient()
52
-
53
- # Should not have context manager methods
54
- assert client.__aenter__ is None
55
- assert client.__aexit__ is None
56
- assert client._async_curl_session is True # Special marker
 
1
  import pytest
2
 
3
 
4
+ from scrapling.engines.static import FetcherSession, FetcherClient
5
 
6
 
7
  class TestFetcherSession:
 
45
  assert client.__enter__ is None
46
  assert client.__exit__ is None
47
  assert client._curl_session is True # Special marker