-
Notifications
You must be signed in to change notification settings - Fork 33
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Browse files
Browse the repository at this point in the history
* Initial stab at df. * Initial stab at what link extraction would look like with DFs. * Added test case. * Docs.
- Loading branch information
Showing
4 changed files
with
134 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
package io.archivesunleashed | ||
|
||
import org.apache.spark.sql.functions.udf | ||
|
||
/**
  * UDFs intended for use with Spark data frames.
  */
package object df {
  // TODO: UDFs for use with data frames go here, tentatively. There are couple of ways we could build UDFs,
  // by wrapping matchbox UDFs or by reimplementing them. The following examples illustrate. Obviously, we'll
  // need to populate more UDFs over time, but this is a start.

  // Wraps the existing matchbox helper: extracts the domain from a URL string.
  val ExtractDomain = udf((url: String) => io.archivesunleashed.matchbox.ExtractDomain(url, ""))

  // Reimplemented inline: strips a leading "www." (and any leading whitespace) from a domain.
  val RemovePrefixWWW = udf((domain: String) => domain.replaceAll("^\\s*www\\.", ""))
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
/* | ||
* Archives Unleashed Toolkit (AUT): | ||
* An open-source platform for analyzing web archives. | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package io.archivesunleashed | ||
|
||
import com.google.common.io.Resources | ||
import io.archivesunleashed.df._ | ||
import org.apache.spark.sql.SparkSession | ||
import org.apache.spark.sql.functions._ | ||
import org.apache.spark.{SparkConf, SparkContext} | ||
import org.junit.runner.RunWith | ||
import org.scalatest.junit.JUnitRunner | ||
import org.scalatest.{BeforeAndAfter, FunSuite} | ||
|
||
@RunWith(classOf[JUnitRunner])
class SimpleDfTest extends FunSuite with BeforeAndAfter {
  private val arcPath = Resources.getResource("arc/example.arc.gz").getPath
  private val master = "local[4]"
  private val appName = "example-df"
  private var sc: SparkContext = _

  before {
    // Fresh local SparkContext for each test run.
    sc = new SparkContext(new SparkConf().setMaster(master).setAppName(appName))
  }

  test("count records") {
    val df = RecordLoader.loadArchives(arcPath, sc).extractValidPagesDF()

    // We need this in order to use the $-notation
    val spark = SparkSession.builder().master("local").getOrCreate()
    import spark.implicits._

    val results = df
      .select(ExtractDomain($"Url").as("Domain"))
      .groupBy("Domain")
      .count()
      .orderBy(desc("count"))
      .head(3)

    // Expected top three domains by page count in the example archive:
    // +------------------+-----+
    // |            Domain|count|
    // +------------------+-----+
    // |   www.archive.org|  132|
    // |     deadlists.com|    2|
    // |www.hideout.com.br|    1|
    // +------------------+-----+
    val expected = Seq(("www.archive.org", 132), ("deadlists.com", 2), ("www.hideout.com.br", 1))
    expected.zipWithIndex.foreach { case ((domain, count), row) =>
      assert(results(row).get(0) == domain)
      assert(results(row).get(1) == count)
    }
  }

  after {
    if (sc != null) {
      sc.stop()
    }
  }
}