|
1 | 1 | # # # # # # # # # # # # # # # # # # # # # # # # # # # # |
2 | 2 | # # |
3 | | -# Bundler and Minimizer for Web files v3.1.0 # |
| 3 | +# Bundler and Minimizer for Web files v3.1.1 # |
4 | 4 | # # |
5 | 5 | # Copyright 2017, PedroHenriques # |
6 | 6 | # http://www.pedrojhenriques.com # |
@@ -337,7 +337,7 @@ def run() |
337 | 337 | # prints the program's outro message
338 | 338 | def printOutroMsg() |
339 | 339 | # print outro message |
340 | | - @cli_obj.printStr("\n=> Thank you for using this application!\nFollow this application at https://github.com/PedroHenriques/Minimizer_CSS-JS\n", false) |
| 340 | + @cli_obj.printStr("\n=> Thank you for using this application!\nFollow this application at https://github.com/PedroHenriques/Web_Bundler_Minimizer\n", false) |
341 | 341 | end |
342 | 342 |
|
343 | 343 | private |
@@ -1621,22 +1621,24 @@ def crawlFile(entry_path, output_path_parts, re_pattern, callback) |
1621 | 1621 | callback_return = callback.call(crawl_path, re_match) |
1622 | 1622 |
|
1623 | 1623 | # check if the lambda function returned a failure or an invalid data type |
1624 | | - if callback_return === false || !callback_return.class.to_s.eql?("String") |
| 1624 | + if callback_return === false || !callback_return.class.to_s.eql?("Array") |
1625 | 1625 | # it did |
1626 | 1626 | # something went wrong while executing the lambda function |
1627 | 1627 | # return a failure |
1628 | 1628 | return(false) |
1629 | 1629 | else |
1630 | 1630 | # it didn't |
1631 | | - # check if the file found hasn't been encountered already |
1632 | | - if !imported_abs_paths.include?(callback_return) && !file_queue.include?(callback_return) |
1633 | | - # it hasn't |
1634 | | - # add it to the queue |
1635 | | - file_queue.push(callback_return) |
1636 | | - end |
| 1631 | + callback_return.each { |file_path| |
| 1632 | + # check if the file found hasn't been encountered already |
| 1633 | + if !imported_abs_paths.include?(file_path) && !file_queue.include?(file_path) |
| 1634 | + # it hasn't |
| 1635 | + # add it to the queue |
| 1636 | + file_queue.push(file_path) |
| 1637 | + end |
1637 | 1638 |
|
1638 | | - # add the file found to the crawled file's array of imported files |
1639 | | - @file_crawl_data[crawl_path][:imports].push(callback_return) |
| 1639 | + # add the file found to the crawled file's array of imported files |
| 1640 | + @file_crawl_data[crawl_path][:imports].push(file_path) |
| 1641 | + } |
1640 | 1642 | end |
1641 | 1643 |
|
1642 | 1644 | # change file_content to have all the text after this match |
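
This hunk changes crawlFile's callback contract: the lambda now returns an Array of resolved paths (or false on failure) instead of a single String, so one matched import statement can enqueue several files. Below is a minimal standalone sketch of the new enqueue loop; the callback body and the local names imported_abs_paths, file_queue, and imports are illustrative stand-ins for the method's real state (imports mirrors @file_crawl_data[crawl_path][:imports]).

# hypothetical callback: resolves one regex match into one or more
# absolute paths, or returns false on failure (the contract this diff introduces)
callback = lambda { |crawl_path, re_match|
  ["/project/src/a.ts", "/project/src/b.ts"]
}

imported_abs_paths = ["/project/src/a.ts"] # files already crawled
file_queue = []                            # files waiting to be crawled
imports = []                               # imports recorded for the current file

callback_return = callback.call("/project/src/entry.ts", nil)

if callback_return == false || !callback_return.is_a?(Array)
  # failure or unexpected return type: abort, as crawlFile does
else
  callback_return.each { |file_path|
    # enqueue each resolved path only if it hasn't been seen yet
    if !imported_abs_paths.include?(file_path) && !file_queue.include?(file_path)
      file_queue.push(file_path)
    end

    # every resolved path is still recorded as an import of the crawled file
    imports.push(file_path)
  }
end

# file_queue => ["/project/src/b.ts"]
# imports    => ["/project/src/a.ts", "/project/src/b.ts"]
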
@@ -1808,7 +1810,9 @@ def crawlSASS(entry_path, output_path_parts) |
1808 | 1810 | def crawlTypeScript(entry_path, output_path_parts) |
1809 | 1811 | # build the lambda function that will be applied to each capture group |
1810 | 1812 | lambda_func = lambda { |crawl_path, re_match| |
1811 | | - return(resolveNodeImportPath(crawl_path, re_match[1], [".tsx", ".ts"])) |
| 1813 | + path = resolveNodeImportPath(crawl_path, re_match[1], [".tsx", ".ts"]) |
| 1814 | + |
| 1815 | + return(path.class.to_s.eql?("String") ? [path] : path) |
1812 | 1816 | } |
1813 | 1817 |
|
1814 | 1818 | # check if a path to a tsconfig.json file was provided in the configuration file |
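
Because crawlFile now expects an Array, crawlTypeScript's lambda wraps a successful String result from resolveNodeImportPath in a one-element Array and passes a false failure through unchanged. The sketch below shows that normalization step in isolation; resolve_stub is a hypothetical stand-in for resolveNodeImportPath, assumed to return a String on success and false on failure.

# hypothetical stand-in for resolveNodeImportPath
def resolve_stub(crawl_path, import_str, extensions)
  return false if import_str.empty?
  File.expand_path(import_str, File.dirname(crawl_path)) + extensions.first
end

path = resolve_stub("/project/src/app.ts", "./util", [".tsx", ".ts"])

# normalize: wrap a String in a one-element Array so the caller can
# always iterate; leave a false failure untouched
normalized = path.class.to_s.eql?("String") ? [path] : path

puts normalized.inspect # => ["/project/src/util.tsx"]
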
|