Merge pull request #33 from MGousseff/master
Support for FlatGeobuf (.fgb) files
MGousseff authored Sep 18, 2024
2 parents 33a71ef + 04d4cc4 commit 18faa3e
Showing 34 changed files with 1,390 additions and 476 deletions.
2 changes: 2 additions & 0 deletions .Rbuildignore
@@ -12,3 +12,5 @@
^CODE_OF_CONDUCT\.md$
^CONTRIBUTING\.md$
^bdtopo_2_2_osm\.csv$
^compareMultipleLCZ\.R$
^createIntersect\.R$
9 changes: 4 additions & 5 deletions DESCRIPTION
@@ -1,6 +1,6 @@
Package: lczexplore
Title: lczexplore
Version: 0.0.1.0003
Version: 0.0.1.0029
Authors@R: c(
person("Matthieu", "Gousseff", , "matthieu.gousseff@univ-ubs.fr", role = c("aut", "cre")),
person(, "Centre National de la Recherche Scientifique, Lab-Sticc", role = "cph", comment = c(ORCID = "0000-0002-7106-2677"))
@@ -9,7 +9,7 @@ Description: This lczexplore package automatize the comparison of sets of local
License: LGPL (>= 3)
Encoding: UTF-8
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.2.3
RoxygenNote: 7.3.2
Imports: RColorBrewer,
cowplot,
dplyr,
@@ -22,16 +22,15 @@ Imports: RColorBrewer,
units,
rlang,
grDevices,
DescTools,
methods
Suggests:
tinytest,
knitr,
rmarkdown,
testthat (>= 3.0.0),
png
png,
markdown
Config/testthat/edition: 3
VignetteBuilder: knitr
LazyData: true
Depends:
R (>= 2.10)
Binary file added GeneralUniquenessSensib.png
4 changes: 3 additions & 1 deletion NAMESPACE
@@ -1,5 +1,6 @@
# Generated by roxygen2: do not edit by hand

export(CohenKappa)
export(LCZareas)
export(areColors)
export(compareLCZ)
@@ -28,15 +29,16 @@ import(sf)
import(tidyr)
import(units)
import(utils)
importFrom(DescTools,CohenKappa)
importFrom(forcats,fct_recode)
importFrom(ggplot2,aes)
importFrom(ggplot2,geom_sf)
importFrom(ggplot2,ggtitle)
importFrom(ggplot2,guides)
importFrom(grDevices,palette.colors)
importFrom(magrittr,"%>%")
importFrom(stats,qnorm)
importFrom(stats,quantile)
importFrom(terra,as.polygons)
importFrom(terra,crop)
importFrom(terra,rast)
importFrom(tidyr,drop_na)
126 changes: 126 additions & 0 deletions R/CohenKappa.R
@@ -0,0 +1,126 @@
#' Compute Cohen's Kappa Coefficient. Taken from DescTools,
#' which based it on Kappa from library(vcd)
#' author: David Meyer
#' see also: kappa in library(psych)
#' Integrated here to reduce the dependency on DescTools, as it is the only function from DescTools we used
#' Computes the agreement rates Cohen's kappa and weighted kappa and their confidence intervals.
#'
#' @param x can either be a numeric vector or a confusion matrix.
#' In the latter case x must be a square matrix; in lczexplore,
#' it will be the matrix of agreement weighted by area over all intersected geometries
#' @param y not used here, but allows computing the cross table of x and y as input
#' @param weights either one out of \code{"Unweighted"} (default), \code{"Equal-Spacing"},
#' \code{"Fleiss-Cohen"}, which will calculate the weights accordingly,
#' or a user-specified matrix having the same dimensions as x containing the weights for each cell.
#' @param conf.level confidence level of the interval.
#' If set to \code{NA} (which is the default) no confidence intervals will be calculated.
#' @param \dots further arguments are passed to the function \code{\link{table}},
#' allowing e.g. to set \code{useNA}. This refers only to the vector interface.
#' @importFrom stats qnorm
#' @details Cohen's kappa is the diagonal sum of the (possibly weighted) relative frequencies, corrected for expected values and standardized by its maximum value. \cr
#' The equal-spacing weights (see Cicchetti and Allison 1971) are defined by \deqn{1 - \frac{|i - j|}{r - 1}}
#' \code{r} being the number of columns/rows, and the Fleiss-Cohen weights by \deqn{1 - \frac{(i - j)^2}{(r - 1)^2}}
#' The latter attaches greater importance to closer disagreements
#' @references Cohen, J. (1960) A coefficient of agreement for nominal scales. \emph{Educational and Psychological Measurement}, 20, 37-46.
#' Everitt, B.S. (1968), Moments of statistics kappa and weighted kappa. \emph{The British Journal of Mathematical and Statistical Psychology}, 21, 97-103.
#' Fleiss, J.L., Cohen, J., and Everitt, B.S. (1969), Large sample standard errors of kappa and weighted kappa. \emph{Psychological Bulletin}, 72, 323-327.
#' Cicchetti, D.V., Allison, T. (1971) A New Procedure for Assessing Reliability
#' of Scoring EEG Sleep Recordings \emph{American Journal of EEG Technology}, 11, 101-109.
#' @author David Meyer <david.meyer@r-project.org>, with changes and tweaks by Andri Signorell <andri@signorell.net>,
#' and integration for areas by Matthieu Gousseff
#' @return the value of this pseudo-Kappa, or a named vector with the kappa and its confidence bounds when \code{conf.level} is set
#' @export
#'
#' @examples
#' # from Bortz et al. (1990) Verteilungsfreie Methoden in der Biostatistik, Springer, pp. 459
#' m <- matrix(c(53, 5, 2,
#' 11, 14, 5,
#' 1, 6, 3), nrow=3, byrow=TRUE,
#' dimnames = list(rater1 = c("V","N","P"), rater2 = c("V","N","P")) )
#' # confusion matrix interface
#' CohenKappa(m, weights="Unweighted")
CohenKappa <- function (x, y = NULL,
weights = c("Unweighted", "Equal-Spacing", "Fleiss-Cohen"),
conf.level = NA, ...) {

if (is.character(weights))
weights <- match.arg(weights)

if (!is.null(y)) {
# we can not ensure a reliable weighted kappa for 2 factors with different levels
# so refuse trying it... (unweighted is no problem)

if (!identical(weights, "Unweighted"))
stop("Vector interface for weighted Kappa is not supported. Provide confusion matrix.")

# x and y must have the same levels in order to build a symmetric confusion matrix
x <- factor(x)
y <- factor(y)
lvl <- unique(c(levels(x), levels(y)))
x <- factor(x, levels = lvl)
y <- factor(y, levels = lvl)
x <- table(x, y, ...)

} else {
d <- dim(x)
if (d[1L] != d[2L])
stop("x must be square matrix if provided as confusion matrix")
}

d <- diag(x)
n <- sum(x)
nc <- ncol(x)
colFreqs <- colSums(x)/n
rowFreqs <- rowSums(x)/n

kappa <- function(po, pc) {
(po - pc)/(1 - pc)
}

std <- function(p, pc, k, W = diag(1, ncol = nc, nrow = nc)) {
sqrt((sum(p * sweep(sweep(W, 1, W %*% colSums(p) * (1 - k)),
2, W %*% rowSums(p) * (1 - k))^2) -
(k - pc * (1 - k))^2) / crossprod(1 - pc)/n)
}

if(identical(weights, "Unweighted")) {
po <- sum(d)/n
pc <- as.vector(crossprod(colFreqs, rowFreqs))
k <- kappa(po, pc)
s <- as.vector(std(x/n, pc, k))

} else {

# some kind of weights defined
W <- if (is.matrix(weights))
weights
else if (weights == "Equal-Spacing")
1 - abs(outer(1:nc, 1:nc, "-"))/(nc - 1)
else # weights == "Fleiss-Cohen"
1 - (abs(outer(1:nc, 1:nc, "-"))/(nc - 1))^2

po <- sum(W * x)/n
pc <- sum(W * colFreqs %o% rowFreqs)
k <- kappa(po, pc)
s <- as.vector(std(x/n, pc, k, W))
}

if (is.na(conf.level)) {
res <- k
} else {
ci <- k + c(1, -1) * qnorm((1 - conf.level)/2) * s
res <- c(kappa = k, lwr.ci = ci[1], upr.ci = ci[2])
}

return(res)

}



# KappaTest <- function(x, weights = c("Equal-Spacing", "Fleiss-Cohen"), conf.level = NA) {
# to do, idea is to implement a Kappa test for H0: kappa = 0 as in
# http://support.sas.com/documentation/cdl/en/statugfreq/63124/PDF/default/statugfreq.pdf, pp. 1687
# print( "still to do...." )

# }
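The vendored CohenKappa() above is exported, so the area-weighted pseudo-Kappa that compareLCZ reports can be reproduced directly. A minimal sketch, not part of this commit, with an invented agreement matrix and assuming the package is installed with this change:

library(lczexplore)
# Hypothetical area-weighted agreement matrix between two LCZ workflows
areaCross <- matrix(c(40,  5, 2,
                       6, 30, 4,
                       1,  3, 9),
                    nrow = 3, byrow = TRUE,
                    dimnames = list(wf1 = c("1", "2", "6"), wf2 = c("1", "2", "6")))
CohenKappa(areaCross, weights = "Unweighted")    # default, as used for the pseudo-Kappa
CohenKappa(areaCross, weights = "Equal-Spacing") # linear (equal-spacing) weights
CohenKappa(areaCross, weights = "Fleiss-Cohen")  # quadratic weights
CohenKappa(areaCross, conf.level = 0.95)         # kappa with confidence bounds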
12 changes: 11 additions & 1 deletion R/LCZareas.R
@@ -12,8 +12,18 @@
#' #LCZareas is not to be used directly by user.
LCZareas<-function(sf,column,LCZlevels){
# Creation of a column with geometry area
sf<-sf %>% mutate(area=st_area(geometry)) %>% drop_units

sf<-tryCatch({
mutate(sf,area=st_area(geometry)) %>% drop_units
},
error=function(e){
message("Some geometries don't seem valid, the function will try to make them valid, it may take a bit longer.")
sf %>% st_make_valid %>% mutate(area=st_area(geometry)) %>% drop_units
}

)


# area by LCZ level
areaLCZ<-sf %>% st_drop_geometry %>% group_by_at(.vars=column) %>%
summarize(area=sum(area,na.rm=T))%>%
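A minimal standalone sketch of the fallback pattern introduced above, not part of the commit: compute polygon areas, and repair geometries with st_make_valid() only if st_area() errors. The toy polygon and LCZ value are assumptions for illustration.

library(sf)
library(dplyr)
library(units)
poly <- st_sfc(st_polygon(list(rbind(c(0, 0), c(1, 0), c(1, 1), c(0, 1), c(0, 0)))), crs = 2154)
sfToy <- st_sf(LCZ_PRIMARY = "6", geometry = poly)
sfToy <- tryCatch(
  mutate(sfToy, area = st_area(geometry)) %>% drop_units(),
  error = function(e) {
    message("Some geometries don't seem valid; repairing them with st_make_valid() first.")
    sfToy %>% st_make_valid() %>% mutate(area = st_area(geometry)) %>% drop_units()
  }
)
sfToy$area  # 1 (square metre, units dropped)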
6 changes: 3 additions & 3 deletions R/compareLCZ.R
@@ -37,7 +37,6 @@
#' The expected arguments are the name of each level of the variables contained
#' in column1 and column2, and also a vector called colors.
#' @importFrom ggplot2 geom_sf guides ggtitle aes
#' @importFrom DescTools CohenKappa
#' @import sf dplyr cowplot forcats units tidyr RColorBrewer utils grDevices rlang
#' @return returns graphics of comparison and an object called matConfOut which contains :
#' matConfLong, a confusion matrix in a longer form,
@@ -386,8 +385,9 @@ matConfLarge<-as.matrix(matConfLarge)

# Add pseudo Kappa statistic to the output
PseudoWeightedCross<-matConfLarge*100
pseudoK<-DescTools::CohenKappa(x=PseudoWeightedCross)
matConfOut$pseudoK<-pseudoK
# pseudoK<-DescTools::CohenKappa(x=PseudoWeightedCross)
pseudoK<-CohenKappa(x=PseudoWeightedCross)
matConfOut$pseudoK<-pseudoK

areas<-matConfOut$areas
percAgg<-matConfOut$percAgg
39 changes: 39 additions & 0 deletions R/createIntersect.R
@@ -0,0 +1,39 @@
# createIntersec: intersect a list of sf LCZ layers, keeping one LCZ column per input layer
# sfList: list of sf objects; columns: name of the LCZ column in each of them;
# refCrs: reference CRS (defaults to the CRS of the first layer); sfWf: optional workflow names used to rename columns
createIntersec<-function(sfList, columns, refCrs=NULL, sfWf=NULL){
echInt<-sfList[[1]] %>% select(columns[1])
if (is.null(refCrs)){refCrs<-st_crs(echInt)}
for (i in 2:length(sfList)){
sfProv<-sfList[[i]] %>% select(columns[i])
# reproject to the reference CRS before intersecting
if (st_crs(sfProv) != refCrs ) {sfProv<-st_transform(sfProv, crs=refCrs)}
echInt<-st_intersection(echInt,sfProv)
}
# rename the LCZ columns after the workflow names when provided, otherwise number them
if (!is.null(sfWf) & length(sfWf) == length(sfList)){
names(echInt)[1:(ncol(echInt)-1)]<-paste0("LCZ",sfWf)
} else { names(echInt)[1:(ncol(echInt)-1)]<-paste0("LCZ",1:length(sfList)) }
echInt
}

# sfBDT_11_78030<-importLCZvect(dirPath="/home/gousseff/Documents/0_DocBiblioTutosPublis/0_ArticlesScientEtThèses/ArticleComparaisonLCZGCWUDAPTEXPERTS/BDT/2011/bdtopo_2_78030",
# file="rsu_lcz.fgb", column="LCZ_PRIMARY")
# class(sfBDT_11_78030)
# sfBDT_22_78030<-importLCZvect(dirPath="/home/gousseff/Documents/0_DocBiblioTutosPublis/0_ArticlesScientEtThèses/ArticleComparaisonLCZGCWUDAPTEXPERTS/BDT/2022/bdtopo_3_78030",
# file="rsu_lcz.fgb", column="LCZ_PRIMARY")
# sf_OSM_11_Auffargis<-importLCZvect(dirPath="/home/gousseff/Documents/0_DocBiblioTutosPublis/0_ArticlesScientEtThèses/ArticleComparaisonLCZGCWUDAPTEXPERTS/OSM/2011/osm_Auffargis/",
# file="rsu_lcz.fgb", column="LCZ_PRIMARY")
# sf_OSM_22_Auffargis<-importLCZvect(dirPath="/home/gousseff/Documents/0_DocBiblioTutosPublis/0_ArticlesScientEtThèses/ArticleComparaisonLCZGCWUDAPTEXPERTS/OSM/2022/osm_Auffargis/",
# file="rsu_lcz.fgb", column="LCZ_PRIMARY")
# sf_WUDAPT_78030<-importLCZvect("/home/gousseff/Documents/0_DocBiblioTutosPublis/0_ArticlesScientEtThèses/ArticleComparaisonLCZGCWUDAPTEXPERTS/WUDAPT",
# file ="wudapt_78030.geojson", column="lcz_primary")
#
# sfList<-list(BDT11 = sfBDT_11_78030, BDT22 = sfBDT_22_78030, OSM11= sf_OSM_11_Auffargis, OSM22 = sf_OSM_22_Auffargis,
# WUDAPT = sf_WUDAPT_78030)
# showLCZ(sfList[[1]])
#
#
#
# intersected<-createIntersec(sfList = sfList, columns = c(rep("LCZ_PRIMARY",4),"lcz_primary"),
# sfWf = c("BDT11","BDT22","OSM11","OSM22","WUDAPT"))
#
#
# test_list<-list(a=c(1,2),b="top",c=TRUE)
# length(test_list)
# for (i in test_list[2:3]) print(str(i))
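A self-contained sketch of how createIntersec() can be called, since the commented examples above depend on local paths. The two toy LCZ layers are invented, and createIntersec() is not exported in this commit, so it is reached through lczexplore::: (or after devtools::load_all()).

library(sf)
library(dplyr)
library(lczexplore)
sq <- function(x0, y0, side) {
  st_polygon(list(rbind(c(x0, y0), c(x0 + side, y0), c(x0 + side, y0 + side),
                        c(x0, y0 + side), c(x0, y0))))
}
lczA <- st_sf(LCZ_PRIMARY = c("1", "6"),
              geometry = st_sfc(sq(0, 0, 2), sq(2, 0, 2), crs = 2154))
lczB <- st_sf(LCZ_PRIMARY = c("1", "8"),
              geometry = st_sfc(sq(0, 0, 3), sq(3, 0, 1), crs = 2154))
intersected <- lczexplore:::createIntersec(sfList = list(lczA, lczB),
                                           columns = c("LCZ_PRIMARY", "LCZ_PRIMARY"),
                                           sfWf = c("A", "B"))
intersected  # LCZ columns renamed LCZA and LCZB, one row per intersected geometry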
48 changes: 35 additions & 13 deletions R/importLCZvect.R
@@ -18,6 +18,7 @@
#' dropped except those specified in the previous parameters
#' @import dplyr forcats rlang sf
#' @importFrom terra crop
#' @importFrom tidyr drop_na
#' @importFrom terra rast
#' @return returns an sf object containing at least the geoms and the LCZ values,
#' and if specified, columns for the IDs of the geoms and the confidence value of the LCZ levels.
@@ -36,28 +37,48 @@ importLCZvect<-function(dirPath, file="rsu_lcz.geojson", output="sfFile", column
"101"="A","102"="B","103"="C","104"="D","105"="E","106"="F","107"="G"),
drop=T, verbose=FALSE){
if (!file.exists(dirPath)){stop(message="The directory set in dirPath doesn't seem to exist")}

fileName<-paste0(dirPath,"/",file)
if ( substr(dirPath, start=nchar(dirPath), stop = nchar(dirPath)) == "/") {
fileName<-paste0(dirPath,file)} else {
fileName<-paste0(dirPath,"/",file)
}

# select only the needed column, that is the unempty strings among column, geomID and confid
colonnes<-c(geomID,column,confid)
colonnes<-colonnes[sapply(colonnes,nchar)!=0]

# Check if all the desired columns are present in the source file and only loads the file if the columns exist
### DOESN'T WORK WITH FlatGeobuf (.fgb)
nom<-gsub(pattern="(.+?)(\\.[^.]*$|$)",x=file,replacement="\\1")
query<-paste0("select * from ",nom," limit 0")
sourceCol<-st_read(dsn=fileName, query=query, quiet=!verbose) %>% names
inCol<-colonnes%in%sourceCol
badCol<-colonnes[!inCol]
colErr<-c("It seems that some of the columns you try to import do not exist in the source file,
are you sure you meant ",
paste(badCol)," ?")
if (prod(inCol)==0){ stop(colErr) } else {
if (drop== TRUE) {sfFile<-sf::st_read(dsn=fileName,quiet=!verbose)[,colonnes] } else {sfFile<-sf::st_read(dsn=fileName,quiet=!verbose)[,]}
extension<-gsub(pattern="(.+?)(\\.[^.]*$|$)",x=file,replacement="\\2")
if (extension != ".fgb"){ # Some metadata for fgb files do not specify table/layer names
query<-paste0("select * from ",nom," limit 0") # So this query wouldn't work with such fgb files
sourceCol<-st_read(dsn=fileName, query=query, quiet=!verbose) %>% names
inCol<-colonnes%in%sourceCol
badCol<-colonnes[!inCol]
colErr<-c("It seems that some of the columns you try to import do not exist in the source file,
are you sure you meant ",
paste(badCol),"?")
if (prod(inCol)==0){ stop(colErr) } else {
if (drop== TRUE) {sfFile<-sf::st_read(dsn=fileName,quiet=!verbose)[,colonnes] } else {
sfFile<-sf::st_read(dsn=fileName,quiet=!verbose)[,]}
}
} else {if (extension == ".fgb") {
sfFile<-sf::st_read(dsn=fileName,quiet=!verbose)[,]
sourceCol<-names(sfFile)
inCol<-colonnes%in%sourceCol
badCol<-colonnes[!inCol]
colErr<-c("It seems that some of the columns you try to import do not exist in the source file,
are you sure you meant ",
paste(badCol),"?")
if (prod(inCol)==0){ stop(colErr) }

}

}

# if typeLevels is empty
if (length(typeLevels)==1){
typeLevels<-unique(subset(sfFile,select=column,drop=TRUE))
typeLevels<-unique(subset(sfFile,select=all_of(column),drop=TRUE))
names(typeLevels)<-typeLevels
}

@@ -102,7 +123,8 @@ if (column!=""){

if(output=="sfFile"){return(sfFile)} else {
if(output=="bBox"){
bBox<-st_bbox(sfFile,crs=st_crs(sfFile)) %>% st_as_sfc
bBox<-st_bbox(sfFile,crs=st_crs(sfFile)) %>% st_as_sfc %>% st_make_valid(geos_keep_collapsed = FALSE)

return(bBox) }
else {
stop("Output must be sfFile to return geoms and LCZ or bBox to return the bounding box")}
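A hedged usage sketch of the new FlatGeobuf path, not part of the commit. The directory and file names are placeholders; any GeoClimate output with an LCZ_PRIMARY column stored in a .fgb file would do.

library(lczexplore)
lczFgb <- importLCZvect(dirPath = "path/to/geoclimate/output",
                        file = "rsu_lcz.fgb",
                        column = "LCZ_PRIMARY")
# For .fgb files the column check happens after loading, because some FlatGeobuf
# metadata do not expose a layer name usable in the OGR SQL query used for other formats.
showLCZ(lczFgb)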